Nov 25 14:25:06 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 25 14:25:07 crc restorecon[4750]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 14:25:07 crc restorecon[4750]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc 
restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 14:25:07 crc 
restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 
14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 14:25:07 crc 
restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 
14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:07 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 14:25:08 crc restorecon[4750]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Nov 25 14:25:09 crc kubenswrapper[4879]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.404524    4879 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408864    4879 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408889    4879 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408894    4879 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408899    4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408905    4879 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408911    4879 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408916    4879 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408921    4879 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408925    4879 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408938    4879 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408942    4879 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408946    4879 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408950    4879 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408954    4879 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408957    4879 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408961    4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408965    4879 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408969    4879 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408972    4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408976    4879 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408980    4879 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408985    4879 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408990    4879 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408995    4879 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.408999    4879 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409003    4879 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409006    4879 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409010    4879 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409014    4879 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409017    4879 feature_gate.go:330] unrecognized feature gate: Example
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409021    4879 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409025    4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409028    4879 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409032    4879 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409036    4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409040    4879 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409043    4879 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409047    4879 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409051    4879 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409054    4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409062    4879 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409066    4879 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409070    4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409073    4879 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409078    4879 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409083    4879 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409087    4879 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409092    4879 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409099    4879 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409104    4879 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409109    4879 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409113    4879 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409141    4879 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409147    4879 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409152    4879 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409160    4879 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409166    4879 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409172    4879 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409177    4879 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409182    4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409186    4879 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409190    4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409196    4879 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409201    4879 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409205    4879 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409209    4879 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409214    4879 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409218    4879 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409222    4879 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409227    4879 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.409231    4879 feature_gate.go:330] unrecognized feature gate:
PersistentIPsForVirtualization Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410132 4879 flags.go:64] FLAG: --address="0.0.0.0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410155 4879 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410166 4879 flags.go:64] FLAG: --anonymous-auth="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410174 4879 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410180 4879 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410185 4879 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410192 4879 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410200 4879 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410206 4879 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410213 4879 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410220 4879 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410226 4879 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410231 4879 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410236 4879 flags.go:64] FLAG: --cgroup-root="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410241 4879 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410245 4879 flags.go:64] FLAG: --client-ca-file="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410249 4879 flags.go:64] FLAG: --cloud-config="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410254 4879 flags.go:64] FLAG: --cloud-provider="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410258 4879 flags.go:64] FLAG: --cluster-dns="[]" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410264 4879 flags.go:64] FLAG: --cluster-domain="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410268 4879 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410272 4879 flags.go:64] FLAG: --config-dir="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410277 4879 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410282 4879 flags.go:64] FLAG: --container-log-max-files="5" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410289 4879 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410294 4879 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410299 4879 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410307 4879 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410312 4879 flags.go:64] FLAG: --contention-profiling="false" Nov 25 14:25:09 crc 
kubenswrapper[4879]: I1125 14:25:09.410317 4879 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410322 4879 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410327 4879 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410331 4879 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410338 4879 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410342 4879 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410347 4879 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410352 4879 flags.go:64] FLAG: --enable-load-reader="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410356 4879 flags.go:64] FLAG: --enable-server="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410360 4879 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410367 4879 flags.go:64] FLAG: --event-burst="100" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410372 4879 flags.go:64] FLAG: --event-qps="50" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410379 4879 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410385 4879 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410389 4879 flags.go:64] FLAG: --eviction-hard="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410395 4879 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410400 4879 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410405 4879 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410410 4879 flags.go:64] FLAG: --eviction-soft="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410415 4879 flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410419 4879 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410424 4879 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410429 4879 flags.go:64] FLAG: --experimental-mounter-path="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410434 4879 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410438 4879 flags.go:64] FLAG: --fail-swap-on="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410443 4879 flags.go:64] FLAG: --feature-gates="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410449 4879 flags.go:64] FLAG: --file-check-frequency="20s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410454 4879 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410459 4879 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410465 4879 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 25 14:25:09 crc 
kubenswrapper[4879]: I1125 14:25:09.410470 4879 flags.go:64] FLAG: --healthz-port="10248" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410477 4879 flags.go:64] FLAG: --help="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410482 4879 flags.go:64] FLAG: --hostname-override="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410488 4879 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410494 4879 flags.go:64] FLAG: --http-check-frequency="20s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410499 4879 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410504 4879 flags.go:64] FLAG: --image-credential-provider-config="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410509 4879 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410514 4879 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410518 4879 flags.go:64] FLAG: --image-service-endpoint="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410523 4879 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410528 4879 flags.go:64] FLAG: --kube-api-burst="100" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410533 4879 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410538 4879 flags.go:64] FLAG: --kube-api-qps="50" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410544 4879 flags.go:64] FLAG: --kube-reserved="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410550 4879 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410554 4879 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410560 4879 flags.go:64] FLAG: --kubelet-cgroups="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410564 4879 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410569 4879 flags.go:64] FLAG: --lock-file="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410574 4879 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410579 4879 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410585 4879 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410593 4879 flags.go:64] FLAG: --log-json-split-stream="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410598 4879 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410603 4879 flags.go:64] FLAG: --log-text-split-stream="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410608 4879 flags.go:64] FLAG: --logging-format="text" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410613 4879 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410618 4879 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410623 4879 flags.go:64] FLAG: --manifest-url="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 
14:25:09.410627 4879 flags.go:64] FLAG: --manifest-url-header="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410635 4879 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410640 4879 flags.go:64] FLAG: --max-open-files="1000000" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410651 4879 flags.go:64] FLAG: --max-pods="110" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410656 4879 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410661 4879 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410666 4879 flags.go:64] FLAG: --memory-manager-policy="None" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410671 4879 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410676 4879 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410681 4879 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410705 4879 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410723 4879 flags.go:64] FLAG: --node-status-max-images="50" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410728 4879 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410733 4879 flags.go:64] FLAG: --oom-score-adj="-999" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410738 4879 flags.go:64] FLAG: --pod-cidr="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410743 4879 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410761 4879 flags.go:64] FLAG: --pod-manifest-path="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410767 4879 flags.go:64] FLAG: --pod-max-pids="-1" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410773 4879 flags.go:64] FLAG: --pods-per-core="0" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410778 4879 flags.go:64] FLAG: --port="10250" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410783 4879 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410788 4879 flags.go:64] FLAG: --provider-id="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410793 4879 flags.go:64] FLAG: --qos-reserved="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410798 4879 flags.go:64] FLAG: --read-only-port="10255" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410804 4879 flags.go:64] FLAG: --register-node="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410809 4879 flags.go:64] FLAG: --register-schedulable="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410814 4879 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410825 4879 flags.go:64] FLAG: --registry-burst="10" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410830 4879 flags.go:64] FLAG: --registry-qps="5" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410836 
4879 flags.go:64] FLAG: --reserved-cpus="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410841 4879 flags.go:64] FLAG: --reserved-memory="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410848 4879 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410853 4879 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410858 4879 flags.go:64] FLAG: --rotate-certificates="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410864 4879 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410869 4879 flags.go:64] FLAG: --runonce="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410874 4879 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410879 4879 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410884 4879 flags.go:64] FLAG: --seccomp-default="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410889 4879 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410894 4879 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410900 4879 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410905 4879 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410911 4879 flags.go:64] FLAG: --storage-driver-password="root" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410916 4879 flags.go:64] FLAG: --storage-driver-secure="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410921 4879 flags.go:64] FLAG: --storage-driver-table="stats" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410928 4879 flags.go:64] FLAG: --storage-driver-user="root" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410932 4879 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410938 4879 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410945 4879 flags.go:64] FLAG: --system-cgroups="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410950 4879 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410960 4879 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410965 4879 flags.go:64] FLAG: --tls-cert-file="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410970 4879 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410976 4879 flags.go:64] FLAG: --tls-min-version="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410982 4879 flags.go:64] FLAG: --tls-private-key-file="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410987 4879 flags.go:64] FLAG: --topology-manager-policy="none" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410992 4879 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.410997 4879 flags.go:64] FLAG: --topology-manager-scope="container" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411003 
4879 flags.go:64] FLAG: --v="2" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411011 4879 flags.go:64] FLAG: --version="false" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411019 4879 flags.go:64] FLAG: --vmodule="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411026 4879 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411031 4879 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411178 4879 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411186 4879 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411190 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411208 4879 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411212 4879 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411216 4879 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411220 4879 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411224 4879 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411230 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411234 4879 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411238 4879 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411243 4879 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411247 4879 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411251 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411256 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411260 4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411266 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411271 4879 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411277 4879 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411281 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411285 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411289 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS 
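The flags.go:64 block that just ended dumps every flag value at startup in a fixed FLAG: --name="value" shape, which makes it easy to compare boots mechanically. A minimal sketch, assuming log text shaped like the entries above, that collects the dump into a dict:

```python
import re

# Matches entries like: I1125 14:25:09.410651 4879 flags.go:64] FLAG: --max-pods="110"
FLAG_RE = re.compile(r'flags\.go:64\] FLAG: (--[\w-]+)="(.*?)"')

def parse_flag_dump(log_text: str) -> dict:
    """Collect the kubelet startup flag dump into {flag: value}."""
    return dict(FLAG_RE.findall(log_text))

sample = 'I1125 14:25:09.410651 4879 flags.go:64] FLAG: --max-pods="110"'
print(parse_flag_dump(sample))  # {'--max-pods': '110'}
```

Diffing two such dicts taken from consecutive boots is a quick way to spot configuration drift.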
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411293 4879 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411298 4879 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411302 4879 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411306 4879 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411310 4879 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411313 4879 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411318 4879 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411321 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411325 4879 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411329 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411333 4879 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411337 4879 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411340 4879 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411344 4879 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411349 4879 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411355 4879 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411362 4879 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411366 4879 feature_gate.go:330] unrecognized feature gate: Example Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411370 4879 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411375 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411378 4879 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411384 4879 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411388 4879 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411392 4879 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411396 4879 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411400 4879 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411405 4879 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411417 4879 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411421 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411424 4879 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411428 4879 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411432 4879 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411437 4879 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411441 4879 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411445 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411449 4879 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411452 4879 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411456 4879 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411460 4879 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411465 4879 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411470 4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411474 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411478 4879 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411482 4879 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411486 4879 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411491 4879 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411497 4879 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411501 4879 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.411506 4879 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.411522 4879 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.426025 4879 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.426096 4879 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426285 4879 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426304 4879 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426314 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426324 4879 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426332 4879 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426343 4879 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426356 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426365 4879 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426376 4879 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
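After the per-gate warnings, the effective set is logged once in Go's map formatting, feature gates: {map[Name:bool ...]} (the I1125 14:25:09.411522 entry above, repeated again below). A small sketch that turns that summary into a Python dict; the gate list is abbreviated here:

```python
import re

LINE = ("feature gates: {map[CloudDualStackNodeIPs:true "
        "DisableKubeletCloudCredentialProviders:true KMSv1:true "
        "NodeSwap:false ValidatingAdmissionPolicy:true]}")  # abbreviated

def parse_feature_gates(line: str) -> dict:
    """Parse Go's map[...] rendering of the feature-gate set."""
    body = re.search(r"map\[(.*)\]", line).group(1)
    return {name: state == "true"
            for name, _, state in (pair.partition(":") for pair in body.split())}

gates = parse_feature_gates(LINE)
print(gates["KMSv1"], gates["NodeSwap"])  # True False
```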
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426387 4879 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426396 4879 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426404 4879 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426412 4879 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426421 4879 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426431 4879 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426441 4879 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426450 4879 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426459 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426467 4879 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426475 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426483 4879 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426491 4879 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426499 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426507 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426516 4879 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426524 4879 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426531 4879 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426539 4879 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426547 4879 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426555 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426563 4879 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426571 4879 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426578 4879 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426590 4879 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426599 4879 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426608 4879 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426616 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426624 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426631 4879 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426639 4879 feature_gate.go:330] unrecognized feature gate: Example Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426647 4879 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426657 4879 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426668 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426676 4879 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426684 4879 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426692 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426700 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426707 4879 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426717 4879 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426724 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426732 4879 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426740 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426748 4879 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426756 4879 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426764 4879 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426773 4879 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426780 4879 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426788 4879 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426795 4879 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426804 4879 
feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426811 4879 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426819 4879 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426828 4879 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426835 4879 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426843 4879 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426851 4879 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426859 4879 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426867 4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426875 4879 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426883 4879 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.426891 4879 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.426905 4879 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427173 4879 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427187 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427197 4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427207 4879 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427216 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427224 4879 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427232 4879 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427240 4879 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427248 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427256 4879 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 
14:25:09.427264 4879 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427272 4879 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427279 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427289 4879 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427297 4879 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427305 4879 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427312 4879 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427320 4879 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427327 4879 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427339 4879 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427352 4879 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427362 4879 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427371 4879 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427379 4879 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427387 4879 feature_gate.go:330] unrecognized feature gate: Example Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427396 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427403 4879 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427411 4879 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427418 4879 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427426 4879 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427434 4879 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427442 4879 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427449 4879 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427457 4879 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427467 4879 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427476 4879 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427483 4879 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427491 4879 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427499 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427507 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427515 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427523 4879 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427531 4879 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427539 4879 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427546 4879 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427554 4879 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427563 4879 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427570 4879 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427578 4879 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427586 4879 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427594 4879 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427604 4879 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427612 4879 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427620 4879 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427630 4879 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427641 4879 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427651 4879 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427661 4879 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
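Three feature_gate.go call sites recur through all of these passes, and in this log they correlate one-to-one with a message kind: :330 for gates the kubelet does not recognize (the OpenShift-defined ones), :351 for deprecated gates (KMSv1), and :353 for gates already GA (CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders, ValidatingAdmissionPolicy). The mapping below is read off this log, not the kubelet source, so treat it as version-specific; a throwaway classifier sketch:

```python
import re

# Call-site -> message kind, as observed in this log (kubelet v1.31.5);
# the feature_gate.go line numbers will drift across releases.
KIND = {"330": "unrecognized", "351": "deprecated", "353": "GA"}

def classify(entry: str):
    m = re.search(r"feature_gate\.go:(\d+)\]", entry)
    return KIND.get(m.group(1)) if m else None

print(classify('W1125 14:25:09.427467 4879 feature_gate.go:351] '
               'Setting deprecated feature gate KMSv1=true.'))  # deprecated
```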
Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427671 4879 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427681 4879 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427690 4879 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427700 4879 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427708 4879 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427716 4879 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427725 4879 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427733 4879 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427742 4879 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427750 4879 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427758 4879 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427765 4879 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.427773 4879 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.427786 4879 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.428076 4879 server.go:940] "Client rotation is on, will bootstrap in background" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.434062 4879 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.434215 4879 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.435719 4879 server.go:997] "Starting client certificate rotation" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.435752 4879 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.436987 4879 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-06 19:14:13.847083865 +0000 UTC Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.437151 4879 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1012h49m4.409966728s for next certificate rotation Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.464093 4879 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.466585 4879 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.485769 4879 log.go:25] "Validated CRI v1 runtime API" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.531067 4879 log.go:25] "Validated CRI v1 image API" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.533605 4879 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.540246 4879 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-25-14-20-22-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.540284 4879 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:45 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.559851 4879 manager.go:217] Machine: {Timestamp:2025-11-25 14:25:09.556234175 +0000 UTC m=+1.159647286 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:613b3c94-49dd-4904-bdaf-1b6a10ba13f7 BootID:fb0103eb-cf5f-48db-9ca5-77a21e79fac0 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:45 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:2e:66:56 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:2e:66:56 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:4b:c7:a5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a7:5c:37 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:4b:1c:97 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:bd:49:62 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:c2:a8:47 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:67:83:a0:cb:bf Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:70:83:db:79:92 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.560197 4879 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.560406 4879 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.561752 4879 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.561969 4879 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.562015 4879 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.562307 4879 topology_manager.go:138] "Creating topology manager with none policy" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.562321 4879 container_manager_linux.go:303] "Creating device plugin manager" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.562884 4879 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.562919 4879 server.go:66] "Creating device plugin registration server" 
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.563225 4879 state_mem.go:36] "Initialized new in-memory state store" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.563335 4879 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.568079 4879 kubelet.go:418] "Attempting to sync node with API server" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.568106 4879 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.568139 4879 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.568156 4879 kubelet.go:324] "Adding apiserver pod source" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.568171 4879 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.573409 4879 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.574494 4879 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.575312 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.575431 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.575381 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.575515 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.577600 4879 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578930 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578958 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578966 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578974 4879 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578988 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.578996 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579004 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579018 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579027 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579035 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579046 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.579054 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.580062 4879 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.580582 4879 server.go:1280] "Started kubelet" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.581870 4879 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.582376 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.581871 4879 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Nov 25 14:25:09 crc systemd[1]: Started Kubernetes Kubelet. 
Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.583315 4879 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.584760 4879 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.584806 4879 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.585556 4879 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 02:36:52.341676702 +0000 UTC Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.585675 4879 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 156h11m42.756004466s for next certificate rotation Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.585760 4879 server.go:460] "Adding debug handlers to kubelet server" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.585827 4879 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.585984 4879 volume_manager.go:287] "The desired_state_of_world populator starts" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.588444 4879 volume_manager.go:289] "Starting Kubelet Volume Manager" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.589381 4879 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.589635 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.589732 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.590068 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="200ms" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.591471 4879 factory.go:55] Registering systemd factory Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.591506 4879 factory.go:221] Registration of the systemd container factory successfully Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.599605 4879 factory.go:153] Registering CRI-O factory Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.599919 4879 factory.go:221] Registration of the crio container factory successfully Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.600615 4879 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.599651 4879 event.go:368] 
"Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187b460f11aaf369 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 14:25:09.580551017 +0000 UTC m=+1.183964088,LastTimestamp:2025-11-25 14:25:09.580551017 +0000 UTC m=+1.183964088,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.600749 4879 factory.go:103] Registering Raw factory Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.600794 4879 manager.go:1196] Started watching for new ooms in manager Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.601474 4879 manager.go:319] Starting recovery of all containers Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605306 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605376 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605394 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605408 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605421 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605435 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605447 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605460 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605471 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605480 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605490 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605526 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605539 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605555 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605569 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605578 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605588 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605600 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605609 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605619 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605629 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605639 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605650 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605660 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605672 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605706 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605722 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605732 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605742 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605752 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605764 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605774 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605785 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605795 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605806 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.605815 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.607971 4879 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608008 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608023 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608036 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 25 14:25:09 crc 
kubenswrapper[4879]: I1125 14:25:09.608051 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608065 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608101 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608115 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608156 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608168 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608184 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608200 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608212 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608225 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608292 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: 
I1125 14:25:09.608306 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608321 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608340 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608354 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608368 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608384 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608398 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608412 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608461 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608476 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608489 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608511 4879 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608526 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608539 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.608552 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609459 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609491 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609504 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609516 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609528 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609543 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609555 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609568 4879 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609580 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609595 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609607 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609620 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609633 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609646 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609658 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609669 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609711 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609726 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609738 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609752 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609764 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609775 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609787 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609798 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609811 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609822 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609834 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609845 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609862 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609874 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609885 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609896 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609909 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609923 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609936 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609950 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609963 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609976 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.609995 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610018 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610034 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610049 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610064 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610080 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610098 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610112 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610145 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610159 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610171 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610185 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610198 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610208 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610221 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610234 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610248 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610262 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610276 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610289 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610302 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610315 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610331 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610387 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610401 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610416 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610427 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610438 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610450 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610461 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610473 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610486 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610499 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610513 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610526 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610538 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610552 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610564 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610577 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610589 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610600 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610612 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610623 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610635 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610647 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610659 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610673 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610686 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610697 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610709 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610720 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610731 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610742 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610756 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610777 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610788 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610805 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610818 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610835 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610847 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610859 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610873 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610885 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610900 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610914 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610926 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610940 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610953 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610966 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610979 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.610994 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611005 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611018 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611029 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611041 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611053 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611066 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611086 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611099 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611111 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611145 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611165 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611178 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611191 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611210 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611225 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611243 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611257 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611270 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611284 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611298 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611314 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611328 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611344 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611359 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611373 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611386 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611400 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611414 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611433 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611447 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611462 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611475 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611488 4879 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611499 4879 reconstruct.go:97] "Volume reconstruction finished" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.611507 4879 reconciler.go:26] "Reconciler: start to sync state" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.618914 4879 manager.go:324] Recovery completed Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.633747 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.636527 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.636578 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.636590 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.638365 4879 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.638393 4879 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.638415 4879 state_mem.go:36] "Initialized new in-memory state store" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.640605 4879 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.643323 4879 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.643401 4879 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.643443 4879 kubelet.go:2335] "Starting kubelet main sync loop" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.643607 4879 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 25 14:25:09 crc kubenswrapper[4879]: W1125 14:25:09.644635 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.644707 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.665514 4879 policy_none.go:49] "None policy: Start" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.666611 4879 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.666639 4879 state_mem.go:35] "Initializing new in-memory state store" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.685985 4879 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.744710 4879 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.744985 4879 manager.go:334] "Starting Device Plugin manager" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.745043 4879 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.745058 4879 server.go:79] "Starting device plugin registration server" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.745663 4879 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.745684 4879 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.745943 4879 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.746035 4879 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.746044 4879 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.755426 4879 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.790908 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="400ms" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.845935 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.847446 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.847488 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.847505 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.847535 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:09 crc kubenswrapper[4879]: E1125 14:25:09.848276 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.945269 4879 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.945413 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.948605 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.948673 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.948686 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.948885 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.949098 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.949205 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950150 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950208 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950223 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950385 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950599 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950677 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950702 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950736 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.950825 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.951988 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952016 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952269 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952357 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952377 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952621 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952838 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.952910 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954110 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954160 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954172 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954303 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954497 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954532 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954558 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954589 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.954614 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.955207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.955253 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.955295 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.955589 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.955663 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957036 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957075 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957108 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957112 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957158 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:09 crc kubenswrapper[4879]: I1125 14:25:09.957190 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.016544 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.016648 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.016836 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.016949 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017001 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017088 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017440 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017479 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017506 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.017534 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.031463 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.031566 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.031598 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.031618 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.049222 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.050793 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.050891 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.050922 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.050982 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.051774 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132612 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132690 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132711 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132733 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132759 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132778 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132800 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132825 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132905 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132875 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132970 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132966 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133000 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133022 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133037 4879 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132957 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133068 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.132949 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133160 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133188 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133166 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133002 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc 
kubenswrapper[4879]: I1125 14:25:10.133023 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133242 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133223 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.133367 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.192408 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="800ms" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.294047 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.316613 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.335786 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.358298 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.364623 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.426985 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-22f073d07950733c9b1aa33bd748784955f3d514eec8ddf16330e755826c380e WatchSource:0}: Error finding container 22f073d07950733c9b1aa33bd748784955f3d514eec8ddf16330e755826c380e: Status 404 returned error can't find the container with id 22f073d07950733c9b1aa33bd748784955f3d514eec8ddf16330e755826c380e Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.428380 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-0d78b01d8cc8a59e24bf47569912cc5b1b04b7bb3abc9f20228d83de31059ef9 WatchSource:0}: Error finding container 0d78b01d8cc8a59e24bf47569912cc5b1b04b7bb3abc9f20228d83de31059ef9: Status 404 returned error can't find the container with id 0d78b01d8cc8a59e24bf47569912cc5b1b04b7bb3abc9f20228d83de31059ef9 Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.439710 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-90eeae3f4596d00b7222e1af1115db45059f59dd33f674b9c0df5a9c3b61cc80 WatchSource:0}: Error finding container 90eeae3f4596d00b7222e1af1115db45059f59dd33f674b9c0df5a9c3b61cc80: Status 404 returned error can't find the container with id 90eeae3f4596d00b7222e1af1115db45059f59dd33f674b9c0df5a9c3b61cc80 Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.440662 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-d0fb45b181c9cf125cea79b6a18a922d4e8d500742686973f76dfb47c9b0ad58 WatchSource:0}: Error finding container d0fb45b181c9cf125cea79b6a18a922d4e8d500742686973f76dfb47c9b0ad58: Status 404 returned error can't find the container with id d0fb45b181c9cf125cea79b6a18a922d4e8d500742686973f76dfb47c9b0ad58 Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.441510 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-26fd30fe7f37a37d0bb72def48b290866eff7251573483645a4222ba79f9d049 WatchSource:0}: Error finding container 26fd30fe7f37a37d0bb72def48b290866eff7251573483645a4222ba79f9d049: Status 404 returned error can't find the container with id 26fd30fe7f37a37d0bb72def48b290866eff7251573483645a4222ba79f9d049 Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.452902 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.454247 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.454282 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.454296 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.454328 4879 kubelet_node_status.go:76] 
"Attempting to register node" node="crc" Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.454834 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.583719 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.585797 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.585899 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.649423 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"22f073d07950733c9b1aa33bd748784955f3d514eec8ddf16330e755826c380e"} Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.650695 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0d78b01d8cc8a59e24bf47569912cc5b1b04b7bb3abc9f20228d83de31059ef9"} Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.652007 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d0fb45b181c9cf125cea79b6a18a922d4e8d500742686973f76dfb47c9b0ad58"} Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.653074 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"26fd30fe7f37a37d0bb72def48b290866eff7251573483645a4222ba79f9d049"} Nov 25 14:25:10 crc kubenswrapper[4879]: I1125 14:25:10.653900 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"90eeae3f4596d00b7222e1af1115db45059f59dd33f674b9c0df5a9c3b61cc80"} Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.660620 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.660692 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:10 crc kubenswrapper[4879]: W1125 14:25:10.744191 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.744321 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:10 crc kubenswrapper[4879]: E1125 14:25:10.993542 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="1.6s" Nov 25 14:25:11 crc kubenswrapper[4879]: W1125 14:25:11.174065 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:11 crc kubenswrapper[4879]: E1125 14:25:11.174541 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.255624 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.258158 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.258196 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.258209 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.258241 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:11 crc kubenswrapper[4879]: E1125 14:25:11.267662 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.583764 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.660253 4879 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" 
containerID="e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a" exitCode=0 Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.660351 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a"} Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.660812 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.662612 4879 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="11fc0f38f7da4a19a83f0f6e66254b7da945d36d201dbc5788a17d0308d8b2c9" exitCode=0 Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.662693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"11fc0f38f7da4a19a83f0f6e66254b7da945d36d201dbc5788a17d0308d8b2c9"} Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.662783 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.662985 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.663024 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.663036 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.663957 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.664016 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.664035 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.666087 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480" exitCode=0 Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.666203 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.666201 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480"} Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.667538 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.667601 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.667620 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.669645 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf"} Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.671639 4879 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada" exitCode=0 Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.671705 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada"} Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.671783 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.672072 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.672986 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.673044 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.673065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.673520 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.673588 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:11 crc kubenswrapper[4879]: I1125 14:25:11.673605 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.583724 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:12 crc kubenswrapper[4879]: E1125 14:25:12.594847 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="3.2s" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.677525 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.677624 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.677646 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.677629 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.678674 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.678727 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.678738 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.679421 4879 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151" exitCode=0 Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.679503 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.679508 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.680229 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.680261 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.680274 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.682157 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"bfa415ecff44458d81a7687e4043aa2dcba979cc5ef9acecc09fe347d10127e0"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.682163 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.682800 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.682831 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.682848 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.685328 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.685384 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.688536 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.688568 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.688595 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2"} Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.688648 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.690208 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.690242 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.690254 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: W1125 14:25:12.801501 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:12 crc kubenswrapper[4879]: E1125 14:25:12.802036 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.868548 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.872400 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.872448 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.872462 4879 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 14:25:12 crc kubenswrapper[4879]: I1125 14:25:12.872490 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:12 crc kubenswrapper[4879]: E1125 14:25:12.872949 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.190:6443: connect: connection refused" node="crc" Nov 25 14:25:13 crc kubenswrapper[4879]: W1125 14:25:13.104360 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:13 crc kubenswrapper[4879]: E1125 14:25:13.104467 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:13 crc kubenswrapper[4879]: W1125 14:25:13.106867 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:13 crc kubenswrapper[4879]: E1125 14:25:13.106912 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.583862 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.694859 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91"} Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.694938 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd"} Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.694959 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d"} Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.694964 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.696405 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:13 crc kubenswrapper[4879]: 
I1125 14:25:13.696449 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.696461 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701109 4879 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1" exitCode=0 Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701282 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701340 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701516 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701686 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.702241 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.701264 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1"} Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703218 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703255 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703270 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703255 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703322 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703338 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703421 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703451 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703465 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703748 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703772 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 
14:25:13 crc kubenswrapper[4879]: I1125 14:25:13.703836 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:13 crc kubenswrapper[4879]: W1125 14:25:13.761627 4879 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:13 crc kubenswrapper[4879]: E1125 14:25:13.761756 4879 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.190:6443: connect: connection refused" logger="UnhandledError" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.425789 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.584213 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.190:6443: connect: connection refused Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708031 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624"} Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708097 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7"} Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708114 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba"} Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708138 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708173 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.708146 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709306 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709355 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709369 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709322 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709415 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.709426 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:14 crc kubenswrapper[4879]: I1125 14:25:14.830727 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.697936 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.713619 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.716253 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91" exitCode=255 Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.716340 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91"} Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.716509 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.717924 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.717955 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.717965 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.718557 4879 scope.go:117] "RemoveContainer" containerID="fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.721701 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce"} Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.721767 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818"} Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.721920 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.722911 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.722941 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:15 crc kubenswrapper[4879]: I1125 14:25:15.722950 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.073843 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.075081 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.075110 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.075134 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.075158 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.300050 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.728456 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.730588 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6"} Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.730665 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.730665 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732097 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732369 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732552 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732232 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732735 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:16 crc kubenswrapper[4879]: I1125 14:25:16.732758 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.425269 4879 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.425908 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" 
containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.456563 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.456855 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.458703 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.458773 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.458796 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.620923 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.733165 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.733232 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.733165 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734268 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734579 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734611 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734625 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734434 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:17 crc kubenswrapper[4879]: I1125 14:25:17.734689 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:18 crc kubenswrapper[4879]: I1125 14:25:18.736261 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:18 crc kubenswrapper[4879]: I1125 14:25:18.737502 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:18 crc kubenswrapper[4879]: I1125 14:25:18.737592 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:18 crc kubenswrapper[4879]: I1125 14:25:18.737615 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:19 crc kubenswrapper[4879]: 
I1125 14:25:19.288681 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.288935 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.290478 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.290529 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.290543 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.307862 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.738679 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.738806 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.739571 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.739629 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:19 crc kubenswrapper[4879]: I1125 14:25:19.739642 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:19 crc kubenswrapper[4879]: E1125 14:25:19.756057 4879 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 25 14:25:20 crc kubenswrapper[4879]: I1125 14:25:20.198460 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:20 crc kubenswrapper[4879]: I1125 14:25:20.740622 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:20 crc kubenswrapper[4879]: I1125 14:25:20.741656 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:20 crc kubenswrapper[4879]: I1125 14:25:20.741697 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:20 crc kubenswrapper[4879]: I1125 14:25:20.741709 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.743104 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.744385 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.744460 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:21 crc 
kubenswrapper[4879]: I1125 14:25:21.744474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.748288 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.815048 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.815637 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.817908 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.817956 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:21 crc kubenswrapper[4879]: I1125 14:25:21.817969 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:22 crc kubenswrapper[4879]: I1125 14:25:22.744861 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 25 14:25:22 crc kubenswrapper[4879]: I1125 14:25:22.745780 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:22 crc kubenswrapper[4879]: I1125 14:25:22.745804 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:22 crc kubenswrapper[4879]: I1125 14:25:22.745812 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:25 crc kubenswrapper[4879]: I1125 14:25:25.584580 4879 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Nov 25 14:25:25 crc kubenswrapper[4879]: I1125 14:25:25.682971 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 25 14:25:25 crc kubenswrapper[4879]: I1125 14:25:25.683063 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 25 14:25:25 crc kubenswrapper[4879]: I1125 14:25:25.688270 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 25 14:25:25 crc kubenswrapper[4879]: I1125 14:25:25.688344 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.425446 4879 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.426019 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.631979 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.632285 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.633870 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.633942 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.634222 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.634253 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.634264 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.643407 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.758816 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.759709 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.759802 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.760081 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.760147 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:27 crc kubenswrapper[4879]: I1125 14:25:27.760169 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:29 crc kubenswrapper[4879]: E1125 14:25:29.756639 4879 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 25 14:25:30 crc kubenswrapper[4879]: E1125 14:25:30.661944 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.666134 4879 trace.go:236] Trace[1224858093]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 14:25:18.775) (total time: 11890ms):
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[1224858093]: ---"Objects listed" error: 11890ms (14:25:30.665)
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[1224858093]: [11.890304522s] [11.890304522s] END
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.666218 4879 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.666689 4879 trace.go:236] Trace[794425981]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 14:25:18.926) (total time: 11739ms):
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[794425981]: ---"Objects listed" error: 11739ms (14:25:30.666)
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[794425981]: [11.739751413s] [11.739751413s] END
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.666721 4879 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.668592 4879 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.668649 4879 trace.go:236] Trace[112040112]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 14:25:18.525) (total time: 12142ms):
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[112040112]: ---"Objects listed" error: 12142ms (14:25:30.668)
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[112040112]: [12.142953332s] [12.142953332s] END
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.668665 4879 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.669657 4879 trace.go:236] Trace[1239326093]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 14:25:18.400) (total time: 12268ms):
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[1239326093]: ---"Objects listed" error: 12268ms (14:25:30.669)
Nov 25 14:25:30 crc kubenswrapper[4879]: Trace[1239326093]: [12.268805103s] [12.268805103s] END
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.669680 4879 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Nov 25 14:25:30 crc kubenswrapper[4879]: E1125 14:25:30.670016 4879 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.973588 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body=
Nov 25 14:25:30 crc kubenswrapper[4879]: I1125 14:25:30.973729 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.578180 4879 apiserver.go:52] "Watching apiserver"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.582186 4879 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.582501 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-gnxj7","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"]
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.582906 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583171 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583281 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.583332 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583376 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.583480 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583599 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583635 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.583722 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-gnxj7"
Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.583754 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.584304 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.584704 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.584956 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.585067 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.585804 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.585954 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.586023 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.587974 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.588132 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.588288 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.588417 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.588672 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.589973 4879 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.605228 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.616550 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.630081 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.640437 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.654586 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.665037 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675329 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675379 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675417 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675443 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675470 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675494 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675518 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675543 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675567 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675592 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675615 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675636 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675659 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675682 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675709 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675730 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675756 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675774 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675805 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675833 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675856 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675878 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675898 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675917 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675935 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675953 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675963 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675972 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.675982 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676023 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677726 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678063 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678113 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676311 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676302 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676401 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676390 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676440 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676480 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676598 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678254 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676638 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676740 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677011 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677149 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678355 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677185 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.676894 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677253 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677288 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677366 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677619 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.677649 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678030 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678396 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678493 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678134 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678651 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678690 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.678955 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679004 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679039 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679086 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679130 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679161 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679189 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679217 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679246 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679533 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679587 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679611 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679634 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679665 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679695 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679724 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679752 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679753 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679779 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679810 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679835 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679864 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679893 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679918 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\"
(UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679938 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679957 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679978 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679997 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680018 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680040 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680062 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680081 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680104 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680159 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680181 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680205 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680228 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680249 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680269 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680292 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680321 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680350 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680378 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680408 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680438 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680472 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680502 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680535 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680564 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680594 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680625 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680654 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680675 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680695 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680716 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680735 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680754 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680776 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680797 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680825 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680846 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680867 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680887 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680905 4879 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680924 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680945 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.679032 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681595 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681627 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681658 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681678 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681702 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681725 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 14:25:31 crc 
kubenswrapper[4879]: I1125 14:25:31.681743 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681801 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682812 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682856 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683040 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683079 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683103 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683148 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683175 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: 
\"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683312 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683332 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683351 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683369 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683390 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683409 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683427 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683444 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683480 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod 
\"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683497 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683517 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683534 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683552 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683568 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683586 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683604 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683620 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683637 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683656 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683674 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683690 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683707 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683726 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683742 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683760 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683776 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683820 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683839 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683857 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683878 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683898 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683922 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683944 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683964 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683984 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684007 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684041 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684067 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684087 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684108 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684187 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684208 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684228 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684245 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684263 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684281 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684300 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684319 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 
14:25:31.684338 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684358 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684379 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684423 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684445 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684467 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684493 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684516 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684537 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684560 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684580 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684599 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684617 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684636 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684652 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684669 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684686 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684701 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684718 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684734 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684751 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684766 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684800 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684817 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684836 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684852 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684871 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684889 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684907 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684924 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684942 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684960 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684978 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685105 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685161 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685185 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685221 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685249 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685293 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685314 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685337 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-hosts-file\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mx5l\" (UniqueName: \"kubernetes.io/projected/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-kube-api-access-6mx5l\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685382 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685425 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685444 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685464 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685545 4879 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685557 4879 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685568 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685578 4879 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685588 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685598 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685609 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685618 4879 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685628 4879 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685638 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685649 4879 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685659 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685671 4879 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685681 4879 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685691 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685701 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685710 4879 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685722 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685733 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685746 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685756 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685768 4879 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685783 4879 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685796 4879 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685808 4879 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685824 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685838 4879 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685850 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685864 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685874 4879 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685885 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686737 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 
crc kubenswrapper[4879]: I1125 14:25:31.679911 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.687521 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680200 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.680782 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681177 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681372 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.681370 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682029 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682150 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682189 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682598 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682638 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682672 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.682669 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.683230 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684153 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684373 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684570 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.684884 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685097 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685480 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685784 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685801 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685892 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.685976 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686190 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686411 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686719 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686738 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686739 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.686870 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.687080 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.687442 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688040 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688330 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688366 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688399 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688615 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688734 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.687514 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.688913 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:25:32.188886672 +0000 UTC m=+23.792299733 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.688973 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.689349 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.689417 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.689614 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.689996 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.690027 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.690408 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.690612 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.690779 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.690358 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.691520 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.691579 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.691589 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.691686 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692052 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692070 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692392 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692459 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692640 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692658 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692815 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.692471 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.693182 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.693262 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.693345 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.693741 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694311 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694334 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694348 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694434 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694937 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694657 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.694596 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.695203 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.695284 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:32.195263142 +0000 UTC m=+23.798676423 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.695368 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.695397 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.695558 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.695910 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.695928 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.696033 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.696456 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.696535 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.696882 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.697147 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.697323 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.698633 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.699156 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.699513 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.699861 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.700219 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.700655 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.700677 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.700978 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.701185 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.701655 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.701918 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.701471 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.702032 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.702549 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.703226 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.703657 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.703712 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.703761 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.703926 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:32.203886525 +0000 UTC m=+23.807299606 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.703996 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.704634 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.705339 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.706644 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.706899 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.707629 4879 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.707704 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.708044 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.709075 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.710670 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.711678 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.711701 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.711718 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.711813 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:32.211770888 +0000 UTC m=+23.815183959 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.713871 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.715252 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.715309 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.715386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717091 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717440 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717469 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717473 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717529 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717564 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717580 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717758 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717765 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.717925 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.721482 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.725964 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.726627 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.727344 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.727468 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.727486 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.727546 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.727563 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:32.227539473 +0000 UTC m=+23.830952754 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.730308 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.731923 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.732694 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.733096 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.736602 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.738611 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.727354 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.739425 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.740091 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.742982 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.743548 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.745611 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.753562 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.754256 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.754358 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.754405 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.754407 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.754611 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.755273 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.755547 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.755812 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.756855 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.757231 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.757303 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.757364 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.757407 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.759240 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.759431 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.763438 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.763519 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.763727 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.763837 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.764148 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.764280 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.764504 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.766005 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.766559 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.768642 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.769679 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.769768 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.770240 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.772913 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.775664 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6" exitCode=255 Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.775808 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6"} Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.775914 4879 scope.go:117] "RemoveContainer" containerID="fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.780610 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786421 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786494 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786520 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-hosts-file\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786546 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mx5l\" (UniqueName: \"kubernetes.io/projected/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-kube-api-access-6mx5l\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786638 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" 
DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786655 4879 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786667 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786678 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786689 4879 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786701 4879 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786714 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786727 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786742 4879 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786753 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786764 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786774 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786785 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786796 4879 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786807 4879 reconciler_common.go:293] "Volume detached for 
volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786818 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786829 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786841 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786852 4879 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786864 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786876 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786887 4879 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786898 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786909 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.786923 4879 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787066 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787083 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787096 4879 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787107 4879 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787146 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787162 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787173 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787183 4879 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787193 4879 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787203 4879 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787212 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787222 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787232 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787241 4879 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787252 4879 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787261 4879 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" 
(UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787271 4879 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787280 4879 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787288 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787296 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787306 4879 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787317 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787326 4879 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787335 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787343 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787352 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787361 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787370 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787379 4879 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787388 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787397 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787407 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787416 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787426 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787436 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787445 4879 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787453 4879 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787464 4879 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787476 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787485 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787494 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787504 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787513 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787522 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787530 4879 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787538 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787650 4879 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787679 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.787711 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788312 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-hosts-file\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788735 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788768 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788784 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788799 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788813 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788826 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788839 4879 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788882 4879 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788900 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788912 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788948 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788969 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788981 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.788994 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789030 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789044 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789106 4879 reconciler_common.go:293] "Volume detached for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789182 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789197 4879 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789210 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789226 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789240 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789275 4879 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789289 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789322 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789338 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789352 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789528 4879 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789549 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789563 4879 
reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789581 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789595 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789576 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789609 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789728 4879 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789741 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789755 4879 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789766 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789778 4879 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789803 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789814 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789827 4879 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789849 4879 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 
14:25:31.789861 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789873 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789885 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789899 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789911 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789924 4879 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789939 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789952 4879 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789964 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789977 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.789992 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790004 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790015 4879 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790026 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790038 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790050 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790062 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790074 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790085 4879 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790095 4879 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790106 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790148 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790162 4879 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790175 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790189 4879 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790201 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790214 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790227 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790239 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790251 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790263 4879 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790273 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790285 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790296 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790308 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790319 4879 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790330 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790341 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790354 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790367 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790379 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790398 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790423 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.790434 4879 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.791036 4879 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.791062 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.791075 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.799363 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.800068 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.801810 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.806964 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mx5l\" (UniqueName: \"kubernetes.io/projected/bcc19ab6-d0dc-465d-90c3-1599a3b4bba0-kube-api-access-6mx5l\") pod \"node-resolver-gnxj7\" (UID: \"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\") " pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.812882 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.829465 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.846071 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.854574 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.856434 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.868083 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.869231 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.879955 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.893295 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.893352 4879 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.895508 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.900572 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.903060 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 14:25:31 crc kubenswrapper[4879]: W1125 14:25:31.913377 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-5bc88872e16fc80c85e4b67663e8a8e4485f3f43a347ba22a728be4b2df61644 WatchSource:0}: Error finding container 5bc88872e16fc80c85e4b67663e8a8e4485f3f43a347ba22a728be4b2df61644: Status 404 returned error can't find the container with id 5bc88872e16fc80c85e4b67663e8a8e4485f3f43a347ba22a728be4b2df61644 Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.913605 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.914364 4879 scope.go:117] "RemoveContainer" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6" Nov 25 14:25:31 crc kubenswrapper[4879]: E1125 14:25:31.914554 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.915272 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.915493 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gnxj7" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.915569 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.934040 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.950482 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: W1125 14:25:31.952092 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcc19ab6_d0dc_465d_90c3_1599a3b4bba0.slice/crio-fc1cf648a6b9a85deb36c92da1fbb4375f0175ec1a33de36b025ef0631a5d1d5 WatchSource:0}: Error finding container fc1cf648a6b9a85deb36c92da1fbb4375f0175ec1a33de36b025ef0631a5d1d5: Status 404 returned error can't find the container with id fc1cf648a6b9a85deb36c92da1fbb4375f0175ec1a33de36b025ef0631a5d1d5 Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.961924 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.976330 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:31 crc kubenswrapper[4879]: I1125 14:25:31.989057 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:15Z\\\",\\\"message\\\":\\\"W1125 14:25:13.895739 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 
14:25:13.896244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764080713 cert, and key in /tmp/serving-cert-3064616007/serving-signer.crt, /tmp/serving-cert-3064616007/serving-signer.key\\\\nI1125 14:25:14.399740 1 observer_polling.go:159] Starting file observer\\\\nW1125 14:25:14.405513 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 14:25:14.405712 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 14:25:14.414109 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3064616007/tls.crt::/tmp/serving-cert-3064616007/tls.key\\\\\\\"\\\\nF1125 14:25:15.024176 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.004979 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.013915 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.019175 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.034971 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.043513 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc 
kubenswrapper[4879]: I1125 14:25:32.060273 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.071073 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.084104 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.099701 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-64t7t"] Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.100081 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.103286 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.103337 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.103337 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.103496 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.103937 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.114970 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.128262 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.139657 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.148446 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc 
kubenswrapper[4879]: I1125 14:25:32.158039 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.184750 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.194972 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.195038 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-mcd-auth-proxy-config\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.195085 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-rootfs\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.195109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dhfk\" (UniqueName: \"kubernetes.io/projected/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-kube-api-access-2dhfk\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.195167 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-proxy-tls\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc 
kubenswrapper[4879]: E1125 14:25:32.195286 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:25:33.195238151 +0000 UTC m=+24.798651222 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.198102 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:15Z\\\",\\\"message\\\":\\\"W1125 14:25:13.895739 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 
14:25:13.896244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764080713 cert, and key in /tmp/serving-cert-3064616007/serving-signer.crt, /tmp/serving-cert-3064616007/serving-signer.key\\\\nI1125 14:25:14.399740 1 observer_polling.go:159] Starting file observer\\\\nW1125 14:25:14.405513 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 14:25:14.405712 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 14:25:14.414109 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3064616007/tls.crt::/tmp/serving-cert-3064616007/tls.key\\\\\\\"\\\\nF1125 14:25:15.024176 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.212369 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.229429 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.241757 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296193 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296243 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-rootfs\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296273 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dhfk\" (UniqueName: \"kubernetes.io/projected/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-kube-api-access-2dhfk\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296329 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296353 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-proxy-tls\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296376 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-mcd-auth-proxy-config\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296405 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296442 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296484 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296500 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296552 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296567 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:33.29654626 +0000 UTC m=+24.899959401 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296572 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296591 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.296599 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-rootfs\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296620 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:33.296612912 +0000 UTC m=+24.900025983 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296671 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296719 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:33.296703065 +0000 UTC m=+24.900116146 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296929 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.296964 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:33.296954892 +0000 UTC m=+24.900367973 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.297836 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-mcd-auth-proxy-config\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.302333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-proxy-tls\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.316263 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dhfk\" (UniqueName: \"kubernetes.io/projected/1f8529f4-b6ae-4467-ad94-67b1113f9d6b-kube-api-access-2dhfk\") pod \"machine-config-daemon-64t7t\" (UID: \"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\") " pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.415677 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:25:32 crc kubenswrapper[4879]: W1125 14:25:32.428170 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f8529f4_b6ae_4467_ad94_67b1113f9d6b.slice/crio-45937a6bc958337ebb7a98237b0cd5747fe9ac39895f51878ab77b6719c6117e WatchSource:0}: Error finding container 45937a6bc958337ebb7a98237b0cd5747fe9ac39895f51878ab77b6719c6117e: Status 404 returned error can't find the container with id 45937a6bc958337ebb7a98237b0cd5747fe9ac39895f51878ab77b6719c6117e Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.479404 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8m8g8"] Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.479775 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.480799 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-4xdgc"] Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.481343 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.482159 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.482287 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.482467 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.482593 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.482706 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.484049 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.494431 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498496 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498770 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-kubelet\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498799 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv4fz\" (UniqueName: \"kubernetes.io/projected/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-kube-api-access-bv4fz\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498822 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-system-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498837 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-multus-certs\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498855 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cnibin\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498872 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 
14:25:32.498899 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-conf-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498924 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498939 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-httx4\" (UniqueName: \"kubernetes.io/projected/f1eafdec-4c5a-4e91-97b4-a117c35838d4-kube-api-access-httx4\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498952 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-os-release\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498965 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-k8s-cni-cncf-io\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.498993 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-multus\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499008 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-daemon-config\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499023 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cni-binary-copy\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499036 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-etc-kubernetes\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 
14:25:32.499050 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-binary-copy\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499064 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-bin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499077 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-system-cni-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499090 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-socket-dir-parent\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499104 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-netns\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499140 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-os-release\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499160 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cnibin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499173 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-hostroot\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.499187 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 
crc kubenswrapper[4879]: I1125 14:25:32.518055 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.531663 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.544239 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.560023 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.573921 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.585877 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.594878 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600297 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cni-binary-copy\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc 
kubenswrapper[4879]: I1125 14:25:32.600331 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-daemon-config\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600347 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-etc-kubernetes\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600362 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-binary-copy\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600379 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-system-cni-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600394 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-bin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600413 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-socket-dir-parent\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600429 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-netns\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600446 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-os-release\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600496 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cnibin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600518 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-hostroot\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600538 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600556 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-kubelet\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600576 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cnibin\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600595 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv4fz\" (UniqueName: \"kubernetes.io/projected/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-kube-api-access-bv4fz\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600611 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-system-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600626 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-multus-certs\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600642 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600670 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-conf-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600695 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-os-release\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600710 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600724 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-httx4\" (UniqueName: \"kubernetes.io/projected/f1eafdec-4c5a-4e91-97b4-a117c35838d4-kube-api-access-httx4\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600739 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-k8s-cni-cncf-io\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600763 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-multus\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.600806 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-multus\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601111 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cni-binary-copy\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601179 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-kubelet\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601213 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-etc-kubernetes\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601343 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-daemon-config\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " 
pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-conf-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601566 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601563 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-k8s-cni-cncf-io\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601631 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-var-lib-cni-bin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601563 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cnibin\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601601 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-netns\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601638 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-system-cni-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601700 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-system-cni-dir\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601710 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-multus-socket-dir-parent\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601753 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-hostroot\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601718 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-host-run-multus-certs\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601787 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-binary-copy\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601790 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-cnibin\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601837 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.601948 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f1eafdec-4c5a-4e91-97b4-a117c35838d4-os-release\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.602020 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-os-release\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.602429 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.611594 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.617328 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-httx4\" (UniqueName: \"kubernetes.io/projected/f1eafdec-4c5a-4e91-97b4-a117c35838d4-kube-api-access-httx4\") pod \"multus-8m8g8\" (UID: \"f1eafdec-4c5a-4e91-97b4-a117c35838d4\") " pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.624923 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv4fz\" (UniqueName: \"kubernetes.io/projected/ac63f434-7d8f-471d-8f1e-e1dc48ecb71a-kube-api-access-bv4fz\") pod \"multus-additional-cni-plugins-4xdgc\" (UID: \"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\") " pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.628510 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:15Z\\\",\\\"message\\\":\\\"W1125 14:25:13.895739 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 
14:25:13.896244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764080713 cert, and key in /tmp/serving-cert-3064616007/serving-signer.crt, /tmp/serving-cert-3064616007/serving-signer.key\\\\nI1125 14:25:14.399740 1 observer_polling.go:159] Starting file observer\\\\nW1125 14:25:14.405513 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 14:25:14.405712 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 14:25:14.414109 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3064616007/tls.crt::/tmp/serving-cert-3064616007/tls.key\\\\\\\"\\\\nF1125 14:25:15.024176 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 
genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.637913 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.643741 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:32 crc kubenswrapper[4879]: E1125 14:25:32.643872 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.649353 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.660930 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.672172 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.683459 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.700931 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.711584 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.720767 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.732649 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.757924 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.780667 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.780739 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.780754 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e6b844a21f3ca3474f7ab52c2b1882416c676998ffb4566f967727a821054065"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.782300 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.782364 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"5bc88872e16fc80c85e4b67663e8a8e4485f3f43a347ba22a728be4b2df61644"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.783976 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.786268 4879 scope.go:117] "RemoveContainer" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6" Nov 25 14:25:32 crc 
kubenswrapper[4879]: E1125 14:25:32.786447 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.788620 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"558ce88ecb12d5adcafa7bb875c80abf6fb13726a7b0f9637607537c47dc4dd4"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.789725 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gnxj7" event={"ID":"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0","Type":"ContainerStarted","Data":"cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.789752 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gnxj7" event={"ID":"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0","Type":"ContainerStarted","Data":"fc1cf648a6b9a85deb36c92da1fbb4375f0175ec1a33de36b025ef0631a5d1d5"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.792236 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.792277 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.792294 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"45937a6bc958337ebb7a98237b0cd5747fe9ac39895f51878ab77b6719c6117e"} Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.792709 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-8m8g8" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.797170 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe8ece3759a360653aac7e0ba7903d08f55d30905c1bc07d15bcc7e3dc3ffa91\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:15Z\\\",\\\"message\\\":\\\"W1125 14:25:13.895739 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 14:25:13.896244 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764080713 cert, and key in /tmp/serving-cert-3064616007/serving-signer.crt, /tmp/serving-cert-3064616007/serving-signer.key\\\\nI1125 14:25:14.399740 1 observer_polling.go:159] Starting file observer\\\\nW1125 14:25:14.405513 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 14:25:14.405712 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 14:25:14.414109 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3064616007/tls.crt::/tmp/serving-cert-3064616007/tls.key\\\\\\\"\\\\nF1125 14:25:15.024176 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" 
enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\"
,\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:32Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.801054 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.828321 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:32Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.864068 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f9p"] Nov 25 14:25:32 crc 
kubenswrapper[4879]: I1125 14:25:32.865110 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.876526 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-25T14:25:32Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.881420 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.900649 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902832 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902863 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902894 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902912 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902930 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902958 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902978 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.902998 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903015 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903042 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903064 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903087 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903104 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903141 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903179 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903217 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.903239 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7zrd\" (UniqueName: \"kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.920596 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.941518 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.962798 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 25 14:25:32 crc kubenswrapper[4879]: I1125 14:25:32.980876 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.001906 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.003851 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.003936 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.003963 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004009 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004030 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004048 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004090 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004112 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004202 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004261 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004281 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7zrd\" (UniqueName: \"kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004325 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 
crc kubenswrapper[4879]: I1125 14:25:33.004350 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004404 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004420 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004427 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004484 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004497 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004520 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004557 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004574 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004569 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004616 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004682 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004709 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004732 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004754 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.004925 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005026 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005053 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005286 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" 
(UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005340 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005343 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005372 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005394 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005406 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005562 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.005634 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.008935 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.058212 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7zrd\" (UniqueName: \"kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd\") pod \"ovnkube-node-g7f9p\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 
14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.077603 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.114351 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.161787 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.188767 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.206826 4879 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.207197 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:25:35.207164118 +0000 UTC m=+26.810577189 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.212786 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:33 crc kubenswrapper[4879]: W1125 14:25:33.225093 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5301133b_1830_45bc_a55e_7c3e97907bb9.slice/crio-7a8a20c2820e9c7250b9ce0fd0df521c9cdd98d5cad5bd9c5d36adbaae7d1076 WatchSource:0}: Error finding container 7a8a20c2820e9c7250b9ce0fd0df521c9cdd98d5cad5bd9c5d36adbaae7d1076: Status 404 returned error can't find the container with id 7a8a20c2820e9c7250b9ce0fd0df521c9cdd98d5cad5bd9c5d36adbaae7d1076 Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.235051 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.273290 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.308113 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.308171 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.308195 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308212 4879 configmap.go:193] Couldn't get configMap 
openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.308223 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308273 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:35.308252951 +0000 UTC m=+26.911666022 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308350 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308388 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:35.308377834 +0000 UTC m=+26.911790905 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308431 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308485 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308431 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308524 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308537 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308504 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308622 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:35.30859457 +0000 UTC m=+26.912007651 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.308643 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:35.308634921 +0000 UTC m=+26.912048002 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.317253 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.351383 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.391644 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.434730 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.475570 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.510668 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.549335 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.643981 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.644046 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.644225 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:33 crc kubenswrapper[4879]: E1125 14:25:33.644395 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.648723 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.649651 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.650584 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.651392 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.653111 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.653820 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.654595 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.655840 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.656679 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.657813 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.658545 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.659975 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.660719 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.662412 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.663175 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.663839 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.665518 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.666390 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.667184 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.668895 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.669743 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.671370 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.671964 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.673525 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.674296 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.675783 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.676695 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.677216 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.678409 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.679032 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.680348 4879 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.680483 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.682777 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.684391 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.685009 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.687198 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.688053 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.689648 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.690568 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.692194 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.692876 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.695848 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.697283 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.698080 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.698692 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.699800 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.701004 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.701942 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.702617 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.704049 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.704643 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.707773 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.708541 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.709115 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.798428 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" exitCode=0 Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.798520 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.799020 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"7a8a20c2820e9c7250b9ce0fd0df521c9cdd98d5cad5bd9c5d36adbaae7d1076"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.801192 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd" exitCode=0 Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.801285 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.801320 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerStarted","Data":"36f8b3157d25e8443dd30372aa35e1f08ded4305c67b8178165712b045e406b5"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.806393 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerStarted","Data":"9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.806538 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerStarted","Data":"2c183fa622141aaf6116e3949d2553a95632ba38bd086eed7259a093b229afaf"} Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.827745 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.848487 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.881941 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.903393 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.919486 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.933893 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.957147 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.972412 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:33 crc kubenswrapper[4879]: I1125 14:25:33.987716 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.001387 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting
\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:33Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.020215 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.035905 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.071890 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.114155 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.161442 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646f
b68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.192585 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.228760 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.275620 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.308267 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.353799 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":
\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-
dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.391444 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.432821 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.434102 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.437363 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.465020 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.493354 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.529867 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.570431 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.615461 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.644309 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:34 crc kubenswrapper[4879]: E1125 14:25:34.644451 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.657726 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.691559 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.730381 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-c8p75"] Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.730798 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.731679 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.739798 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.761980 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.780352 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.801230 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.810964 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.813556 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.813604 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.813621 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.813631 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.813640 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" 
event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.815325 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerStarted","Data":"235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93"} Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.850906 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.888224 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.921515 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsbjn\" (UniqueName: \"kubernetes.io/projected/b6837743-8dfe-448b-988a-54e78cdd5c57-kube-api-access-hsbjn\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.921568 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b6837743-8dfe-448b-988a-54e78cdd5c57-serviceca\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.921615 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6837743-8dfe-448b-988a-54e78cdd5c57-host\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.935047 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":
\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-
dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:34 crc kubenswrapper[4879]: I1125 14:25:34.971201 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:34Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.011447 4879 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.022166 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsbjn\" (UniqueName: \"kubernetes.io/projected/b6837743-8dfe-448b-988a-54e78cdd5c57-kube-api-access-hsbjn\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.022207 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b6837743-8dfe-448b-988a-54e78cdd5c57-serviceca\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.022236 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6837743-8dfe-448b-988a-54e78cdd5c57-host\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc 
kubenswrapper[4879]: I1125 14:25:35.022320 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b6837743-8dfe-448b-988a-54e78cdd5c57-host\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.023283 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b6837743-8dfe-448b-988a-54e78cdd5c57-serviceca\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.063428 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsbjn\" (UniqueName: \"kubernetes.io/projected/b6837743-8dfe-448b-988a-54e78cdd5c57-kube-api-access-hsbjn\") pod \"node-ca-c8p75\" (UID: \"b6837743-8dfe-448b-988a-54e78cdd5c57\") " pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.074089 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host
-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.116900 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b7588
2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.152085 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.186273 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.223970 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.224301 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:25:39.224280971 +0000 UTC m=+30.827694042 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.232654 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.268961 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc 
kubenswrapper[4879]: I1125 14:25:35.309439 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.325013 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.325314 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325197 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.325448 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325458 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:39.325438686 +0000 UTC m=+30.928851757 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.325625 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325635 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325768 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325834 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325937 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:39.325922409 +0000 UTC m=+30.929335480 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325658 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.326072 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.326140 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.325701 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.326216 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:39.326207807 +0000 UTC m=+30.929620878 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.326369 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:39.326359251 +0000 UTC m=+30.929772322 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.343328 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-c8p75" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.348987 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.390406 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.438188 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.473215 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.508240 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.550720 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.589785 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.630532 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.644033 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.644191 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.644591 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:35 crc kubenswrapper[4879]: E1125 14:25:35.644651 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.667543 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.712074 4879 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.755828 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath
\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.795426 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.822704 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.824813 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93" exitCode=0 Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.824881 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93"} Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.826705 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-c8p75" event={"ID":"b6837743-8dfe-448b-988a-54e78cdd5c57","Type":"ContainerStarted","Data":"948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e"} Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.826742 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-c8p75" event={"ID":"b6837743-8dfe-448b-988a-54e78cdd5c57","Type":"ContainerStarted","Data":"e7f559b0fa1ce286504bf29a8f81112200d7aa96316e12be7c3db86252f4e351"} Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.829161 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.872286 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.908519 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.948794 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:35 crc kubenswrapper[4879]: I1125 14:25:35.989432 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:35Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.039072 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.071758 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.107967 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.151449 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.191632 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.229142 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.266924 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.309103 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.349896 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.390375 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.432293 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.474262 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.644439 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:36 crc kubenswrapper[4879]: E1125 14:25:36.644627 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.834484 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c" exitCode=0 Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.834590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c"} Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.852827 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.881440 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.899905 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.914234 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.931349 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.946874 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.959788 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.973661 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 
2025-08-24T17:21:41Z" Nov 25 14:25:36 crc kubenswrapper[4879]: I1125 14:25:36.986914 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:36Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.003633 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\
\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.025520 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.040674 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.050715 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.062154 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.070494 4879 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.072350 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.072555 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.072583 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.072594 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.072703 4879 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.103256 4879 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.103566 4879 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.104735 4879 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.104757 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.104768 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.104788 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.104797 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.117588 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.121225 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.121266 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.121277 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.121293 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.121303 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.133680 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.137249 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.137278 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.137286 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.137300 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.137309 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.149994 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.153974 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.154029 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.154042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.154059 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.154071 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.167723 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.171405 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.171439 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.171447 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.171462 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.171471 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.184292 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.184456 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.187178 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.187209 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.187224 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.187242 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.187255 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.289407 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.289446 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.289459 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.289480 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.289491 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.392355 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.392394 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.392403 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.392417 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.392427 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.494747 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.495325 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.495414 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.495486 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.495552 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.597893 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.597926 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.597937 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.597954 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.597964 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.644408 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.644496 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.644617 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:37 crc kubenswrapper[4879]: E1125 14:25:37.644707 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.699807 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.699846 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.699855 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.699869 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.699878 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.802561 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.802647 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.802658 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.802677 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.802689 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.841553 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885" exitCode=0 Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.842001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.846312 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.860537 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mo
untPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.876429 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.897338 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.905145 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.905222 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.905301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.905327 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.905339 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:37Z","lastTransitionTime":"2025-11-25T14:25:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.922367 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71
c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.936563 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.945615 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.956384 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.970109 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582
f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.983319 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:37 crc kubenswrapper[4879]: I1125 14:25:37.993985 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:37Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.007402 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.007967 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.008094 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.008194 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.008274 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.008362 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.019086 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.034779 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.051635 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.070705 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.110596 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.110645 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.110656 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.110674 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.110683 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.213555 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.213603 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.213613 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.213629 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.213640 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.316007 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.316153 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.316181 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.316209 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.316227 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.419304 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.419416 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.419437 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.419467 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.419493 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.521835 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.521877 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.521888 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.521908 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.521917 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.625230 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.625271 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.625283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.625301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.625314 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.646650 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:38 crc kubenswrapper[4879]: E1125 14:25:38.646782 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.727859 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.727935 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.727959 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.727989 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.728013 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.830772 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.830812 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.830821 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.830835 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.830844 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.855564 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerStarted","Data":"aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.869916 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.883794 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.896490 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.918276 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.932779 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.933695 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.933741 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.933750 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.933769 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.933779 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:38Z","lastTransitionTime":"2025-11-25T14:25:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.946531 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.958872 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.974224 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:38 crc kubenswrapper[4879]: I1125 14:25:38.989188 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.000230 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:38Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.013268 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.028733 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582
f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.035451 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.035484 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.035494 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.035786 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.035809 
4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.049728 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5
8408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.059794 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.072065 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.141079 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.141117 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.141152 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.141170 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.141188 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.243672 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.243708 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.243721 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.243735 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.243744 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.263201 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.263449 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.263415436 +0000 UTC m=+38.866828507 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.346100 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.346146 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.346157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.346172 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.346185 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.364636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.364694 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.364720 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.364744 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364774 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364829 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364854 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364881 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.364858099 +0000 UTC m=+38.968271230 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364910 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.36489527 +0000 UTC m=+38.968308331 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364856 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364933 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364958 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.364952972 +0000 UTC m=+38.968366043 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364829 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.364990 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.365000 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.365024 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.365016793 +0000 UTC m=+38.968429864 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.448742 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.448776 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.448785 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.448798 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.448808 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.551495 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.551535 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.551545 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.551564 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.551578 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.643916 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.643937 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.644316 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:39 crc kubenswrapper[4879]: E1125 14:25:39.644451 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.654044 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.654269 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.654356 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.654440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.654532 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.660110 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.673254 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.685302 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.696035 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.706163 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.719463 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582
f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.736367 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.748371 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.755983 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.756018 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.756028 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.756044 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.756054 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.760652 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.791404 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.809189 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.822855 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.841049 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z 
is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.855595 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.863500 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.863557 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.863589 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.863609 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.863621 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.866272 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.866491 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.869898 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771" exitCode=0 Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.869933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.872954 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.889327 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.895445 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.902139 4879 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.915055 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.925756 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.935679 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.950552 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.966532 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.966581 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.966594 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.966613 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.966623 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:39Z","lastTransitionTime":"2025-11-25T14:25:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.970351 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.982847 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:39 crc kubenswrapper[4879]: I1125 14:25:39.996045 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:39Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.009466 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.029655 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.044570 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.054171 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.064193 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.068628 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.068668 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.068679 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.068698 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.068708 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.096971 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.130379 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.140337 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.152440 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.164400 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.171443 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.171485 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.171494 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.171512 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.171525 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.175621 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.191208 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.212210 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc8278
59b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.225727 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.237651 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.250247 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.268407 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.274038 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.274183 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.274223 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.274260 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.274288 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.285037 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.295979 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.306537 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.320831 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb7
44e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.377284 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.377324 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.377335 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: 
I1125 14:25:40.377352 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.377363 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.480239 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.480295 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.480308 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.480327 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.480340 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.583212 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.583271 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.583291 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.583311 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.583324 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.643889 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:40 crc kubenswrapper[4879]: E1125 14:25:40.644043 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.685965 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.686003 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.686012 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.686027 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.686037 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.788199 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.788236 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.788245 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.788260 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.788269 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.831920 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.832759 4879 scope.go:117] "RemoveContainer" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6" Nov 25 14:25:40 crc kubenswrapper[4879]: E1125 14:25:40.832928 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.877288 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac63f434-7d8f-471d-8f1e-e1dc48ecb71a" containerID="2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51" exitCode=0 Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.877367 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerDied","Data":"2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.877484 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.878032 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.890696 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.890761 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.890784 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.890815 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.890837 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.893025 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.906084 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.914799 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.949668 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.969971 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.983042 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.993502 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.993551 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.993565 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.993584 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.993602 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:40Z","lastTransitionTime":"2025-11-25T14:25:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:40 crc kubenswrapper[4879]: I1125 14:25:40.995889 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:40Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.006500 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.022049 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.042893 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc8278
59b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.056339 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.071394 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.084795 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096336 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096368 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096377 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096391 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096399 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.096471 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.109671 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.123806 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 
2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.135785 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.147733 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.161306 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.180696 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc8278
59b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.193355 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.198669 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.198849 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.198926 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.199162 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.199317 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.207532 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.220693 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.233376 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.248668 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.259634 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.275023 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.291362 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.302205 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.302492 4879 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.302645 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.302769 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.302868 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.310525 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,
\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.324679 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.338607 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.406726 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.406777 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.406791 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.406809 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.406822 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.509801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.509870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.509889 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.509915 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.509931 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.612718 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.612781 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.612798 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.612825 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.612843 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.644957 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.645623 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:41 crc kubenswrapper[4879]: E1125 14:25:41.645746 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:41 crc kubenswrapper[4879]: E1125 14:25:41.645878 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.715537 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.715574 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.715583 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.715598 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.715609 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.818601 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.818656 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.818671 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.818694 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.818711 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.884173 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" event={"ID":"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a","Type":"ContainerStarted","Data":"9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.884273 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.900862 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.914274 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.921081 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.921138 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.921150 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.921168 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.921180 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:41Z","lastTransitionTime":"2025-11-25T14:25:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.933571 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.945668 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.956581 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.966998 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.984780 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc8278
59b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:41 crc kubenswrapper[4879]: I1125 14:25:41.996752 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:41Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.006835 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.023239 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.023286 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.023299 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.023319 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.023332 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.025200 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.044152 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.057978 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.070890 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.082137 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.099759 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.125474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.125518 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.125527 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.125573 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.125589 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.227476 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.227512 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.227520 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.227535 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.227544 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.330329 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.330648 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.330774 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.330877 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.330974 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.433585 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.433631 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.433643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.433661 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.433674 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.537112 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.537176 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.537190 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.537207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.537218 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.639763 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.639801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.639812 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.639830 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.639847 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.644048 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:42 crc kubenswrapper[4879]: E1125 14:25:42.644193 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.742050 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.742100 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.742113 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.742150 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.742160 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.844425 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.844464 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.844475 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.844493 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.844504 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.889698 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/0.log" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.893163 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6" exitCode=1 Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.893223 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.893892 4879 scope.go:117] "RemoveContainer" containerID="b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.908494 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.926603 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.938291 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.949880 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.949939 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.949950 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.949970 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.949982 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:42Z","lastTransitionTime":"2025-11-25T14:25:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.963217 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped 
ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.981499 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d3
4720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:42 crc kubenswrapper[4879]: I1125 14:25:42.997561 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:42Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.009721 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.023927 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.037912 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053259 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053504 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053525 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053536 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053554 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.053566 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.067634 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.082014 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.106243 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.120761 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.131252 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.156681 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.156736 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.156747 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.156769 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.156783 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.258839 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.258888 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.258899 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.258916 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.258929 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.361077 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.361114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.361145 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.361160 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.361170 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.463381 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.463434 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.463445 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.463464 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.463489 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.567631 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.567685 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.567699 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.567721 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.567737 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.644509 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.644581 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:43 crc kubenswrapper[4879]: E1125 14:25:43.644706 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:43 crc kubenswrapper[4879]: E1125 14:25:43.644848 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.670175 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.670229 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.670247 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.670274 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.670294 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.772379 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.772428 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.772440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.772459 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.772472 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.874857 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.874901 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.874909 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.874924 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.874933 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.898545 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/0.log" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.902007 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.902155 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.915894 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.928396 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.944329 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.959143 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.964499    4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s"]
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.964943    4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.966259    4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.966821    4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.976802    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.976837    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.976850    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.976869    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.976879    4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:43Z","lastTransitionTime":"2025-11-25T14:25:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.982634 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped 
ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:43 crc kubenswrapper[4879]: I1125 14:25:43.993769 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:43Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.004518 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.015918 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.028162 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.039789 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.040024 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.040071    4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.040306    4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl78c\" (UniqueName: \"kubernetes.io/projected/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-kube-api-access-cl78c\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.046693    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host
-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.059892 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.073209 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.080565 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.080611 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.080622 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.080640 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.080652 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.091592 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.115017 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-
11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.128883 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.141351 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.141400 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.141444 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-cl78c\" (UniqueName: \"kubernetes.io/projected/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-kube-api-access-cl78c\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.141466 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.142169 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.142285 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.144509 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.154820 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.163850 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.167089 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl78c\" (UniqueName: \"kubernetes.io/projected/e92a55f6-51a0-4f4f-b5eb-594867b91b4a-kube-api-access-cl78c\") pod \"ovnkube-control-plane-749d76644c-wf76s\" (UID: \"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.180530 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.183768 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.183809 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.183823 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.183878 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.183891 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.194474 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.208947 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.223096 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.240569 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.261530 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.276613 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.276620 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.288767 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.288822 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.288835 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.288853 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.288864 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.295482 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: W1125 14:25:44.301633 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode92a55f6_51a0_4f4f_b5eb_594867b91b4a.slice/crio-a9b707a70969e3c742af1d6276568bc9e467f34eca3729163e2e9b8af579d07b WatchSource:0}: Error finding container a9b707a70969e3c742af1d6276568bc9e467f34eca3729163e2e9b8af579d07b: Status 404 returned error can't find the container with id a9b707a70969e3c742af1d6276568bc9e467f34eca3729163e2e9b8af579d07b Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.310514 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.324618 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.338711 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.360177 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped 
ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.372454 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.386162 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.391717 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.391769 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.391783 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.391801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.391816 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.494267 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.494333 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.494347 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.494383 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.494395 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.596518 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.596566 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.596579 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.596599 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.596612 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.644192 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:25:44 crc kubenswrapper[4879]: E1125 14:25:44.644305 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.699629 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.699667 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.699679 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.699696 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.699709 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.802448 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.802786 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.802940 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.803100 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.803272 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.904864 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.905101 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.905229 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.905330 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.905436 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:44Z","lastTransitionTime":"2025-11-25T14:25:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.910729 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/1.log"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.911432 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/0.log"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.913931 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7" exitCode=1
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.914061 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.914183 4879 scope.go:117] "RemoveContainer" containerID="b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.914856 4879 scope.go:117] "RemoveContainer" containerID="0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7"
Nov 25 14:25:44 crc kubenswrapper[4879]: E1125 14:25:44.915051 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9"
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.918279 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" event={"ID":"e92a55f6-51a0-4f4f-b5eb-594867b91b4a","Type":"ContainerStarted","Data":"77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.918401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" event={"ID":"e92a55f6-51a0-4f4f-b5eb-594867b91b4a","Type":"ContainerStarted","Data":"2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.918463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" event={"ID":"e92a55f6-51a0-4f4f-b5eb-594867b91b4a","Type":"ContainerStarted","Data":"a9b707a70969e3c742af1d6276568bc9e467f34eca3729163e2e9b8af579d07b"}
Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.927958 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:44 crc kubenswrapper[4879]: I1125 14:25:44.947433 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.002669 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 
6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.
168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:44Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.009273 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.009313 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.009331 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.009351 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.009365 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.023835 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.035760 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.049047 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.062610 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.076743 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.086778 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.095443 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.104830 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.111353 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.111383 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.111395 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.111412 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.111424 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.116910 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.133861 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-
11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.146495 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.159667 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.168469 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.179751 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.197048 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.209300 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.214713 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.214813 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.214835 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.214865 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.214883 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.229071 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.251097 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\
\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.262798 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.273670 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.286150 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.297424 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.315295 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa
3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731473
1ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.317163 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.317202 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.317210 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.317225 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.317237 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.328609 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.339383 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.351244 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.367532 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.380758 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.390218 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.409964 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-48cv4"] Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.410507 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.410570 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.419595 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.419638 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.419649 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.419666 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.419678 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.432264 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.444362 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.454882 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.465349 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.478043 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.489591 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.499331 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.510474 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522158 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522202 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522213 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522227 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522237 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.522580 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.534245 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.547553 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.563006 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg26z\" (UniqueName: \"kubernetes.io/projected/960966b7-77d2-49d8-bfcc-2aa44e032f8c-kube-api-access-kg26z\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.563085 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.573513 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4fbbebe5af8f485f4a89c49682dbeae49fc827859b73ce8e5a8d9c96d763be6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:42Z\\\",\\\"message\\\":\\\"8 for removal\\\\nI1125 14:25:42.583255 6163 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 14:25:42.583269 6163 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 14:25:42.583274 6163 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 14:25:42.583273 6163 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 14:25:42.583286 6163 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1125 14:25:42.583292 6163 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1125 14:25:42.583296 6163 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 14:25:42.583297 6163 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1125 14:25:42.583312 6163 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 14:25:42.583319 6163 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 14:25:42.583323 6163 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 14:25:42.583325 6163 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 14:25:42.583330 6163 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 14:25:42.583339 6163 factory.go:656] Stopping watch factory\\\\nI1125 14:25:42.583340 6163 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 14:25:42.583355 6163 ovnkube.go:599] Stopped ovnkube\\\\nI11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 
6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.
168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.608300 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.624250 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.624297 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.624308 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.624326 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.624338 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.643717 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.643745 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.643878 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.644021 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.650823 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/we
bhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.664159 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg26z\" (UniqueName: \"kubernetes.io/projected/960966b7-77d2-49d8-bfcc-2aa44e032f8c-kube-api-access-kg26z\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.664214 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.664346 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.664402 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:25:46.164387052 +0000 UTC m=+37.767800123 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.689084 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.716844 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg26z\" (UniqueName: \"kubernetes.io/projected/960966b7-77d2-49d8-bfcc-2aa44e032f8c-kube-api-access-kg26z\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.727056 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.727111 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.727156 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: 
I1125 14:25:45.727174 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.727185 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.750859 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daem
on-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.787298 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.829408 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.829443 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.829453 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.829468 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.829477 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.924232 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/1.log" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.928997 4879 scope.go:117] "RemoveContainer" containerID="0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7" Nov 25 14:25:45 crc kubenswrapper[4879]: E1125 14:25:45.929617 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.930991 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.931041 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.931051 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.931069 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.931079 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:45Z","lastTransitionTime":"2025-11-25T14:25:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.945968 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.975705 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:45 crc kubenswrapper[4879]: I1125 14:25:45.990080 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:45Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.006391 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.024992 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.034160 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.034207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.034215 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.034230 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.034239 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.041507 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.069576 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.110491 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.137189 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.137238 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.137248 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.137266 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.137276 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.150749 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.170229 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:46 crc kubenswrapper[4879]: E1125 14:25:46.170402 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:46 crc kubenswrapper[4879]: E1125 14:25:46.170463 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:25:47.170444572 +0000 UTC m=+38.773857653 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.192383 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.228434 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.240469 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.240529 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.240540 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.240559 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.240571 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.274241 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d8
9e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\"
,\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.318237 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.343933 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.343995 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.344008 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.344032 4879 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.344047 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.352926 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.391556 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.433022 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.447346 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.447423 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.447442 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.447468 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.447484 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.469376 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:46Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.549783 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.549843 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.549855 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.549892 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.549904 4879 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.644802 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:46 crc kubenswrapper[4879]: E1125 14:25:46.645011 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.652969 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.653010 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.653021 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.653037 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.653047 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.755596 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.755643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.755654 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.755673 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.755683 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.858293 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.858383 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.858410 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.858444 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.858468 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.961951 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.961992 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.962034 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.962051 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:46 crc kubenswrapper[4879]: I1125 14:25:46.962061 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:46Z","lastTransitionTime":"2025-11-25T14:25:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.065062 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.065099 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.065109 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.065152 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.065162 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.167668 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.167725 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.167743 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.167766 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.167783 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.182943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.183171 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.183296 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:25:49.183267424 +0000 UTC m=+40.786680495 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.270098 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.270177 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.270186 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.270203 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.270214 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.283764 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.284110 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:26:03.284087979 +0000 UTC m=+54.887501050 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.372507 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.372776 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.372879 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.372946 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.373021 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.385085 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.385173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.385263 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385306 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385367 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:03.385348677 +0000 UTC m=+54.988761748 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.385306 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385383 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385521 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385541 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385392 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385416 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385702 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385716 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385586 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:03.385571263 +0000 UTC m=+54.988984344 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385776 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:03.385759638 +0000 UTC m=+54.989172719 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.385796 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:03.385785279 +0000 UTC m=+54.989198360 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.394575 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.394641 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.394660 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.394684 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.394700 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.408149 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:47Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.411766 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.412013 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.412097 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.412196 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.412284 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.426415 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:47Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.430643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.430693 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.430702 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.430717 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.430728 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.442813 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:47Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.446951 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.447024 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.447040 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.447065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.447084 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.460729 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:47Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.464075 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.464111 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
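Every one of these patch attempts dies at the same TLS handshake: the webhook's serving certificate expired on 2025-08-24T17:21:41Z, months before the node's clock reads 2025-11-25T14:25:47Z, and Go's TLS stack rejects any certificate whose validity window excludes the current time. A minimal sketch of that window check follows; the PEM path is hypothetical, since the log does not say where the node.network-node-identity.openshift.io serving certificate is stored:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving cert.
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	// The same validity-window test behind "x509: certificate has
	// expired or is not yet valid" in the log above.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate invalid: now %s outside [%s, %s]\n",
			now.Format(time.RFC3339),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
		return
	}
	fmt.Println("certificate is within its validity window")
}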
event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.464134 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.464150 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.464161 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.474407 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:47Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.474592 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.476053 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
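The "exceeds retry count" record marks the kubelet giving up after a fixed number of patch attempts. The following is a schematic of that retry pattern, not the kubelet's actual code: nodeStatusUpdateRetry mirrors the kubelet constant of the same name, and tryPatch is a hypothetical stand-in for the PATCH that the webhook keeps rejecting above:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the kubelet constant of the same name.
const nodeStatusUpdateRetry = 5

// tryPatch stands in for the node-status PATCH; here it always fails,
// as it does in the log while the webhook certificate is expired.
func tryPatch() error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": ` +
		`tls: failed to verify certificate: x509: certificate has expired or is not yet valid`)
}

// updateNodeStatus retries a bounded number of times, then gives up with
// the same message seen in the log.
func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryPatch(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}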
event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.476096 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.476108 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.476142 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.476155 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.579164 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.579244 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.579261 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.579277 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.579288 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.644557 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.644560 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.644583 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.644936 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.644703 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:47 crc kubenswrapper[4879]: E1125 14:25:47.644967 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.682095 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.682443 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.682535 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.682626 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.682750 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.785822 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.786212 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.786339 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.786461 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.786582 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.889244 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.889290 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.889301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.889318 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.889336 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.992064 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.992292 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.992327 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.992348 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:47 crc kubenswrapper[4879]: I1125 14:25:47.992362 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:47Z","lastTransitionTime":"2025-11-25T14:25:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.095156 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.095197 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.095207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.095227 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.095237 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.197660 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.197716 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.197725 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.197742 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.197751 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.301813 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.301861 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.301870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.301884 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.301893 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.405298 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.405340 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.405349 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.405382 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.405395 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.507929 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.507983 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.507994 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.508014 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.508027 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.610967 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.611042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.611054 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.611069 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.611082 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.644526 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:48 crc kubenswrapper[4879]: E1125 14:25:48.644842 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.713591 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.713639 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.713652 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.713669 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.713679 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.815923 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.816199 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.816311 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.816413 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.816493 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.920282 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.920349 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.920368 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.920395 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:48 crc kubenswrapper[4879]: I1125 14:25:48.920408 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:48Z","lastTransitionTime":"2025-11-25T14:25:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.023065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.023182 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.023199 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.023223 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.023235 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.126391 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.126492 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.126514 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.126541 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.126555 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.207855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:49 crc kubenswrapper[4879]: E1125 14:25:49.208220 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:49 crc kubenswrapper[4879]: E1125 14:25:49.208290 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:25:53.208269569 +0000 UTC m=+44.811682640 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.230263 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.230378 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.230389 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.230411 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.230423 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.334042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.334094 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.334107 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.334187 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.334206 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.436996 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.437074 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.437085 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.437102 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.437114 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.540610 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.540683 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.540718 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.540748 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.540771 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.643967 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644211 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:49 crc kubenswrapper[4879]: E1125 14:25:49.644430 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644481 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644525 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644548 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644578 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644597 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:49 crc kubenswrapper[4879]: E1125 14:25:49.644689 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.644605 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: E1125 14:25:49.644836 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.674957 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a4
5a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.690658 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.707065 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.723860 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.737953 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.752421 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.752455 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.752464 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.752480 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.752491 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.755526 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.766380 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.776909 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.788393 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.799845 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.810725 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.827678 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.838509 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.850255 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.853926 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.853958 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.853966 4879 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.853981 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.853990 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.860220 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.871795 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.881622 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:49Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.955954 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.956010 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.956023 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.956042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:49 crc kubenswrapper[4879]: I1125 14:25:49.956054 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:49Z","lastTransitionTime":"2025-11-25T14:25:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.060033 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.060114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.060148 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.060170 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.060184 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.162901 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.162962 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.162975 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.162991 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.163000 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.265442 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.265485 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.265496 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.265511 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.265527 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.367910 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.367963 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.367973 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.367991 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.368004 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.470704 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.470752 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.470761 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.470779 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.470790 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.573060 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.573137 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.573152 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.573169 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.573179 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.643925 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:50 crc kubenswrapper[4879]: E1125 14:25:50.644054 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.676611 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.676685 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.676697 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.676720 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.677007 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.779952 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.780031 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.780043 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.780066 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.780080 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.883471 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.883537 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.883551 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.883574 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.883587 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.986494 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.986559 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.986573 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.986592 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:50 crc kubenswrapper[4879]: I1125 14:25:50.986606 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:50Z","lastTransitionTime":"2025-11-25T14:25:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.090037 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.090099 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.090113 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.090162 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.090182 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.193314 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.193352 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.193363 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.193380 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.193393 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.297077 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.297142 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.297152 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.297171 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.297184 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.400239 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.400309 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.400320 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.400335 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.400344 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.503474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.503527 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.503540 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.503554 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.503566 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.606188 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.606249 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.606266 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.606288 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.606302 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.644005 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.644005 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:25:51 crc kubenswrapper[4879]: E1125 14:25:51.644182 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:25:51 crc kubenswrapper[4879]: E1125 14:25:51.644206 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.644776 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:25:51 crc kubenswrapper[4879]: E1125 14:25:51.644919 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.709778 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.709829 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.709839 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.709861 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.709874 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.813692 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.813785 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.813810 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.813844 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.813866 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.917012 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.917071 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.917084 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.917104 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:51 crc kubenswrapper[4879]: I1125 14:25:51.917145 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:51Z","lastTransitionTime":"2025-11-25T14:25:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.020383 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.020428 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.020440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.020459 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.020469 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.123481 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.123543 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.123555 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.123573 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.123585 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.227255 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.227314 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.227329 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.227346 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.227362 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.329727 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.329801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.329818 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.329839 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.329853 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.431891 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.431945 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.431953 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.431965 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.431974 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.534722 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.534766 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.534775 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.534790 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.534800 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.636787 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.636859 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.636880 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.636905 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.636923 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.644346 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:52 crc kubenswrapper[4879]: E1125 14:25:52.644484 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.739373 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.739421 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.739437 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.739454 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.739465 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.842904 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.844157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.844186 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.844203 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.844212 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.946543 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.946599 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.946612 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.946629 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:52 crc kubenswrapper[4879]: I1125 14:25:52.946642 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:52Z","lastTransitionTime":"2025-11-25T14:25:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.049060 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.049099 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.049108 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.049157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.049170 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.153087 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.153148 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.153157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.153173 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.153184 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.256058 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.256148 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.256165 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.256184 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.256198 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.258522 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:25:53 crc kubenswrapper[4879]: E1125 14:25:53.258693 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 14:25:53 crc kubenswrapper[4879]: E1125 14:25:53.258767 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:26:01.258745243 +0000 UTC m=+52.862158334 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.359180 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.359217 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.359226 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.359243 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.359254 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.461769 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.461846 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.461862 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.461887 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.461900 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.564729 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.564787 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.564796 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.564813 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.564823 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.643946 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.643968 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:25:53 crc kubenswrapper[4879]: E1125 14:25:53.644092 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.643949 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:25:53 crc kubenswrapper[4879]: E1125 14:25:53.644245 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:25:53 crc kubenswrapper[4879]: E1125 14:25:53.644354 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.667798 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.667841 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.667853 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.667870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.667881 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.770380 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.770422 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.770432 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.770449 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.770461 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.872795 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.872851 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.872864 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.872891 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.872908 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.975400 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.975440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.975457 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.975474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:53 crc kubenswrapper[4879]: I1125 14:25:53.975485 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:53Z","lastTransitionTime":"2025-11-25T14:25:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.078798 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.078845 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.078871 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.078892 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.078906 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.181873 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.181940 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.181956 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.181980 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.181995 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.284427 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.284474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.284485 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.284503 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.284515 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.387500 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.387584 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.387614 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.387642 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.387662 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.490402 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.490455 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.490464 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.490479 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.490488 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.593548 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.593603 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.593618 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.593638 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.593653 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.644817 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:25:54 crc kubenswrapper[4879]: E1125 14:25:54.645303 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.645564 4879 scope.go:117] "RemoveContainer" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.697612 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.697657 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.697667 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.697688 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.697700 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.800088 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.800155 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.800166 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.800185 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.800195 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.902531 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.902588 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.902606 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.902630 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.902643 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:54Z","lastTransitionTime":"2025-11-25T14:25:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.960895 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.963625 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09"}
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.964159 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 25 14:25:54 crc kubenswrapper[4879]: I1125 14:25:54.984455 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd75530
8d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:54Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.001158 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:54Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.005633 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.005678 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.005691 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.005709 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.005720 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.019694 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.033217 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.048921 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.063354 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.077200 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.088742 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.101787 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.108251 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.108289 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.108300 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.108318 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.108332 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.115092 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.130921 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.145801 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.167732 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.183374 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.202242 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.211229 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.211280 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.211292 4879 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.211312 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.211329 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.219766 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.234507 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:55Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.314102 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.314174 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.314186 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.314205 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.314217 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.416823 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.416873 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.416882 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.416898 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.416909 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.520812 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.520900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.520933 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.520965 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.520989 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.623998 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.624050 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.624060 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.624078 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.624087 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.644681 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.644785 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:55 crc kubenswrapper[4879]: E1125 14:25:55.644863 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.644893 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:55 crc kubenswrapper[4879]: E1125 14:25:55.645076 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:55 crc kubenswrapper[4879]: E1125 14:25:55.645276 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.726441 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.726498 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.726506 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.726520 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.726530 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.829750 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.829826 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.829848 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.829879 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.829904 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.932695 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.932741 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.932754 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.932775 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:55 crc kubenswrapper[4879]: I1125 14:25:55.932788 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:55Z","lastTransitionTime":"2025-11-25T14:25:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.035330 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.035404 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.035420 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.035451 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.035469 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.138945 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.139004 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.139022 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.139081 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.139102 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.241985 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.242024 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.242038 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.242057 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.242074 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.345066 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.345163 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.345184 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.345212 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.345231 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.448592 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.448642 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.448653 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.448670 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.448683 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.550833 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.550896 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.550912 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.550944 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.550984 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.644631 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:56 crc kubenswrapper[4879]: E1125 14:25:56.644803 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.653483 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.653556 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.653566 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.653585 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.653599 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.756059 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.756123 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.756141 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.756190 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.756207 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.859872 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.859927 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.859936 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.859954 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.859966 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.962485 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.962536 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.962545 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.962562 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:56 crc kubenswrapper[4879]: I1125 14:25:56.962571 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:56Z","lastTransitionTime":"2025-11-25T14:25:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.065213 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.065267 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.065280 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.065300 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.065316 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.168022 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.168066 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.168074 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.168086 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.168096 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.270954 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.271010 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.271023 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.271041 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.271051 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.373774 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.373824 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.373832 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.373846 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.373855 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.462236 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.474295 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.474535 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.475984 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.476035 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.476046 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.476063 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.476077 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.484184 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.484233 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.484248 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.484267 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.484281 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.489168 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.498077 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502210 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502882 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502913 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502921 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502934 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.502943 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.516056 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.517482 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"6
13b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.521765 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.521792 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.521801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.521816 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.521826 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.529575 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.533263 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.539889 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.539927 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.539958 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.539978 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.539990 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.551038 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519a
db9303cebf0e3db877c4afe7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.552452 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.556386 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.556425 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.556439 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.556465 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.556481 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.565616 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.569508 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.569623 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.578980 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.579020 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.579032 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.579054 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.579068 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.579911 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"nam
e\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.592771 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.606635 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.628319 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.643342 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.643811 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.643916 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.644076 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.644164 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.648735 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:57 crc kubenswrapper[4879]: E1125 14:25:57.650122 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.650884 4879 scope.go:117] "RemoveContainer" containerID="0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.659565 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.672752 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Di
sabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.681484 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.681515 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.681523 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.681541 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.681575 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.687937 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.701097 4879 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.716292 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.785000 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.785048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.785064 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.785110 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.785614 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.889273 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.889305 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.889314 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.889330 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.889341 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.976485 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/1.log" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.980431 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.980682 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.992398 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.992452 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.992465 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.992483 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.992496 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:57Z","lastTransitionTime":"2025-11-25T14:25:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:57 crc kubenswrapper[4879]: I1125 14:25:57.995428 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:57Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.013691 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.027266 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.040952 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.055471 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.074901 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73
d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.084976 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.095250 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.095298 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.095308 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.095325 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.095338 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.099799 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.112432 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.127288 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.139936 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.154975 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.170269 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.191793 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253
e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.197518 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.197557 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.197567 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc 
kubenswrapper[4879]: I1125 14:25:58.197583 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.197592 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.205632 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.220461 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.241763 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.253828 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.269455 4879 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":
\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"cont
ainerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:58Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.300175 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.300227 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.300237 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.300252 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.300261 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.402999 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.403759 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.403798 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.403820 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.403831 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.506924 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.507660 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.507682 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.507701 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.507714 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.610507 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.610562 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.610571 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.610586 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.610596 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.644252 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:25:58 crc kubenswrapper[4879]: E1125 14:25:58.644420 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.712982 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.713019 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.713031 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.713048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.713060 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.815949 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.815991 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.816001 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.816019 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.816028 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.918269 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.918316 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.918332 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.918355 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.918367 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:58Z","lastTransitionTime":"2025-11-25T14:25:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.985939 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/2.log" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.986635 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/1.log" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.989142 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698" exitCode=1 Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.989271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698"} Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.989321 4879 scope.go:117] "RemoveContainer" containerID="0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7" Nov 25 14:25:58 crc kubenswrapper[4879]: I1125 14:25:58.990174 4879 scope.go:117] "RemoveContainer" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698" Nov 25 14:25:58 crc kubenswrapper[4879]: E1125 14:25:58.990386 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.005176 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.018344 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.020351 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.020386 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.020405 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.020422 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.020434 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.031908 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.046234 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.057811 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.079434 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa
3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731473
1ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.092854 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for 
RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.103655 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.116075 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.122399 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.122603 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.122694 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.122778 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.122856 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.130581 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.142849 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.152348 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.165594 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.180139 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.192267 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.203478 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.226237 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.226487 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.226611 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.226710 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.226791 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.230077 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.244333 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.330451 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.330514 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.330530 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.330548 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.330558 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.432922 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.433116 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.433186 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.433224 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.433244 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.535658 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.535713 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.535725 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.535745 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.535757 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.637677 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.637755 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.637779 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.637817 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.637841 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.643985 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.644082 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:25:59 crc kubenswrapper[4879]: E1125 14:25:59.644106 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.644005 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:25:59 crc kubenswrapper[4879]: E1125 14:25:59.644372 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:25:59 crc kubenswrapper[4879]: E1125 14:25:59.644562 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.661110 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.673947 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.692677 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.712299 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73
d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fee6fa0e0ab12f869466c94927ba31b0bc7519adb9303cebf0e3db877c4afe7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"message\\\":\\\":true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53: 10.217.4.10:9154:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {be9dcc9e-c16a-4962-a6d2-4adeb0b929c4}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-dns/dns-default]} name:Service_openshift-dns/dns-default_UDP_node_router+switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[udp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.10:53:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4c1be812-05d3-4f45-91b5-a853a5c8de71}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1125 14:25:44.044324 6313 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1125 14:25:44.044623 6313 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1125 14:25:44.044699 6313 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1125 14:25:44.044730 6313 ovnkube.go:599] Stopped ovnkube\\\\nI1125 14:25:44.044749 6313 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1125 14:25:44.044804 6313 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.727785 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.741065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.741110 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.741123 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.741158 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.741173 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.747778 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.768151 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z" Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.784818 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.795683    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.808629    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.822392    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.836458    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.844643    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.844699    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.844713    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.844738    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.844755    4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.849843    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.861776    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.878027    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.899271    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.915182    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873       1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921       1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936       1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977       1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087       1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222       1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242       1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257       1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272       1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299       1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610       1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.929829    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:25:59Z is after 2025-08-24T17:21:41Z"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.947203    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.947249    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.947264    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.947287    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.947302    4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:25:59Z","lastTransitionTime":"2025-11-25T14:25:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.995636    4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/2.log"
Nov 25 14:25:59 crc kubenswrapper[4879]: I1125 14:25:59.999918    4879 scope.go:117] "RemoveContainer" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698"
Nov 25 14:26:00 crc kubenswrapper[4879]: E1125 14:26:00.000104    4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.017904    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.032590    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.  The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.049917    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.  The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.050491    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.050549    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.050564    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.050584    4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.050597    4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.066405    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.  The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.094849    4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.109101 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.122977 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.136162 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.149126 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.153038 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.153061 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.153070 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.153085 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.153095 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.163190 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/k
ubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.175128 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac
70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.196087 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\"
,\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-
o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.213812 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.225611 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.237105 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.250770 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.254853 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.254898 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.254909 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.254923 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.254933 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.264114 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.273336 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:00Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.357237 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.357337 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.357359 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.357386 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.357408 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.459948 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.459992 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.460003 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.460020 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.460031 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.563844 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.563900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.563928 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.563952 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.563967 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.643903 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:00 crc kubenswrapper[4879]: E1125 14:26:00.644085 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.666634 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.666683 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.666697 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.666716 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.666731 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.770077 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.770154 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.770168 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.770188 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.770201 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.873292 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.873367 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.873381 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.873406 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.873426 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.976449 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.976495 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.976505 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.976525 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:00 crc kubenswrapper[4879]: I1125 14:26:00.976536 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:00Z","lastTransitionTime":"2025-11-25T14:26:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.079530 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.079586 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.079596 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.079611 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.079621 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.182290 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.182323 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.182333 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.182352 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.182370 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.285157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.285209 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.285224 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.285243 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.285253 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.354335 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:01 crc kubenswrapper[4879]: E1125 14:26:01.354585 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:26:01 crc kubenswrapper[4879]: E1125 14:26:01.354703 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:26:17.354670218 +0000 UTC m=+68.958083369 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.387932 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.388950 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.388988 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.389022 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.389034 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.491042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.491077 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.491086 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.491100 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.491109 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.594275 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.594323 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.594335 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.594351 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.594360 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.644185 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.644246 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.644210 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:01 crc kubenswrapper[4879]: E1125 14:26:01.644342 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:01 crc kubenswrapper[4879]: E1125 14:26:01.644439 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:01 crc kubenswrapper[4879]: E1125 14:26:01.644536 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.697694 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.697739 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.697753 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.697773 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.697784 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.800069 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.800116 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.800141 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.800159 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.800170 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.903304 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.903354 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.903365 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.903381 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:01 crc kubenswrapper[4879]: I1125 14:26:01.903392 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:01Z","lastTransitionTime":"2025-11-25T14:26:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.005261 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.005339 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.005357 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.005378 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.005388 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:02Z","lastTransitionTime":"2025-11-25T14:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.108870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.108939 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.108956 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.108976 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.108987 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:02Z","lastTransitionTime":"2025-11-25T14:26:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
[The same four kubelet_node_status.go:724 event records (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady) and the setters.go:603 "Node became not ready" condition (reason KubeletNotReady: no CNI configuration file in /etc/kubernetes/cni/net.d/) repeat roughly every 100 ms from 14:26:02.211 through 14:26:02.630.]
Nov 25 14:26:02 crc kubenswrapper[4879]: I1125 14:26:02.644075 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:02 crc kubenswrapper[4879]: E1125 14:26:02.644340 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The same node-status records repeat roughly every 100 ms from 14:26:02.733 through 14:26:03.351.]
Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.380170 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.380327 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:26:35.380303572 +0000 UTC m=+86.983716653 (durationBeforeRetry 32s).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.453813 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.453860 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.453870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.453886 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.453898 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:03Z","lastTransitionTime":"2025-11-25T14:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.481654 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.481722 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.481751 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.481778 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481815 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481841 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481856 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481873 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481914 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:35.481898028 +0000 UTC m=+87.085311099 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481927 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:35.481922049 +0000 UTC m=+87.085335120 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481826 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481947 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:35.48194354 +0000 UTC m=+87.085356601 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.481983 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.482030 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.482044 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.482111 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:26:35.482092944 +0000 UTC m=+87.085506015 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.556693 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.556737 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.556745 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.556760 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.556770 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:03Z","lastTransitionTime":"2025-11-25T14:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.644556 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.644603 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.644661 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.644701 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.644794 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:03 crc kubenswrapper[4879]: E1125 14:26:03.644916 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.659779 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.659828 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.659843 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.659863 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:03 crc kubenswrapper[4879]: I1125 14:26:03.659873 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:03Z","lastTransitionTime":"2025-11-25T14:26:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[The node-status records repeat roughly every 100 ms from 14:26:03.659 through 14:26:04.589.]
Nov 25 14:26:04 crc kubenswrapper[4879]: I1125 14:26:04.644469 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:04 crc kubenswrapper[4879]: E1125 14:26:04.644618 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
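[Editor's note: two retry delays surface in this excerpt: the volume errors above are parked with "No retries permitted until ... (durationBeforeRetry 32s)", and the ovnkube-controller status later in the log shows "back-off 20s restarting failed container" at restartCount 2. Both are consistent with the kubelet's usual doubling back-off. The sketch below illustrates that arithmetic; the initial delays and caps are assumptions for illustration, not values read from this log.]

#!/usr/bin/env python3
"""Sketch of the exponential back-off pattern visible in this log:
each consecutive failure doubles the wait before the next retry,
up to a cap."""

def backoff_schedule(initial: float, cap: float, retries: int):
    """Yield the delay applied before each retry: initial * 2**n, capped."""
    delay = initial
    for _ in range(retries):
        yield min(delay, cap)
        delay *= 2

if __name__ == "__main__":
    # Container restart back-off (assumed 10s initial, 5-minute cap):
    # 10, 20, 40, 80, 160, 300, 300 -- the observed "back-off 20s" for
    # ovnkube-controller matches the delay after its second crash.
    print(list(backoff_schedule(10, 300, 7)))
    # Volume operation back-off (assumed 0.5s initial, 2-minute cap):
    # 0.5, 1, 2, 4, 8, 16, 32 -- the observed 32s durationBeforeRetry
    # would correspond to the seventh consecutive failure.
    print(list(backoff_schedule(0.5, 120, 7)))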
[The node-status records repeat roughly every 100 ms from 14:26:04.692 through 14:26:05.620.]
Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.644685 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.644755 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.644802 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:05 crc kubenswrapper[4879]: E1125 14:26:05.644917 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:05 crc kubenswrapper[4879]: E1125 14:26:05.645033 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:05 crc kubenswrapper[4879]: E1125 14:26:05.645153 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.703422 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.718211 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.724463 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.724543 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.724568 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.724598 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.724619 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:05Z","lastTransitionTime":"2025-11-25T14:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.736617 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.750455 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.761919 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.778204 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.796505 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.809881 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.825255 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.827053 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.827101 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.827111 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.827141 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.827152 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:05Z","lastTransitionTime":"2025-11-25T14:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.839017 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.851433 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.862935 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.873994 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.891577 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f836
2f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.913006 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\
\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930243 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930862 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930915 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930930 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930949 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.930960 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:05Z","lastTransitionTime":"2025-11-25T14:26:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.944498 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.961909 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:05 crc kubenswrapper[4879]: I1125 14:26:05.977499 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:05Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.034200 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.034253 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.034476 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.034508 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.034529 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.137373 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.137419 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.137430 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.137446 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.137457 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.240650 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.240703 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.240717 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.240741 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.240757 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.344059 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.344104 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.344114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.344146 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.344155 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.447356 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.447416 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.447428 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.447446 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.447457 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.550148 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.550221 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.550242 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.550269 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.550288 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.644585 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:06 crc kubenswrapper[4879]: E1125 14:26:06.644848 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.653200 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.653265 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.653279 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.653301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.653317 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.757110 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.757546 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.757784 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.758004 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.758228 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.861492 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.861550 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.861566 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.861589 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.861604 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.964948 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.965012 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.965028 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.965051 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:06 crc kubenswrapper[4879]: I1125 14:26:06.965067 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:06Z","lastTransitionTime":"2025-11-25T14:26:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.068600 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.068643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.068654 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.068671 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.068684 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.171629 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.171664 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.171672 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.171684 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.171693 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.275158 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.275249 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.275283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.275313 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.275334 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.378429 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.378496 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.378514 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.378540 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.378559 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.481508 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.481564 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.481577 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.481597 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.481609 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.584870 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.585458 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.585657 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.585893 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.586105 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.644787 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.644787 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.644983 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.645208 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.644815 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.645352 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.648816 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.648972 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.649091 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.649265 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.649401 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.663783 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:07Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.668806 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.669019 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.669250 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.669413 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.669548 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.682916 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:07Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.690805 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.691719 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.691753 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.691777 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.691791 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.707831 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:07Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.711726 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.711767 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.711776 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.711790 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.711802 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.722712 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:07Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.761363 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.761665 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.761760 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.761854 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.761933 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.777262 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:07Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:07 crc kubenswrapper[4879]: E1125 14:26:07.777379 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.778622 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.778646 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.778654 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.778667 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.778675 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.880943 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.881315 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.881457 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.881586 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.881685 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.984406 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.984654 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.984747 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.984833 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:07 crc kubenswrapper[4879]: I1125 14:26:07.984906 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:07Z","lastTransitionTime":"2025-11-25T14:26:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.086827 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.087048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.087184 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.087297 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.087382 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.189972 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.190015 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.190023 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.190038 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.190050 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.292902 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.292944 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.292954 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.292971 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.292985 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.395343 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.395375 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.395383 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.395396 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.395405 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.497818 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.497887 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.497900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.497919 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.497930 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.601049 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.601086 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.601098 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.601115 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.601166 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.643913 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:08 crc kubenswrapper[4879]: E1125 14:26:08.644031 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.703844 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.703900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.703918 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.703943 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.703961 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.806811 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.806879 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.806901 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.806932 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.806957 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.910857 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.910963 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.910986 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.911009 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:08 crc kubenswrapper[4879]: I1125 14:26:08.911025 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:08Z","lastTransitionTime":"2025-11-25T14:26:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.014157 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.014206 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.014218 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.014242 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.014257 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.117430 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.117491 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.117508 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.117531 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.117548 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.219961 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.220016 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.220030 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.220048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.220061 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.322944 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.323016 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.323035 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.323054 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.323065 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.426367 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.426432 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.426443 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.426460 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.426495 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.528637 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.528675 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.528683 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.528697 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.528708 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.632095 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.632160 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.632172 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.632188 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.632199 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.643848 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.643958 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.644059 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:09 crc kubenswrapper[4879]: E1125 14:26:09.644046 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:09 crc kubenswrapper[4879]: E1125 14:26:09.644269 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:09 crc kubenswrapper[4879]: E1125 14:26:09.644466 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.660580 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.673583 4879 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.688995 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.702546 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.721493 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735244 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735314 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735339 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735354 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.735909 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.764759 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.781487 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.796617 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.811976 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.830255 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.837841 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.837910 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.837923 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.837940 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.837952 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.850388 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/k
ubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.863070 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac
70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.885431 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\"
,\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-
o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.899571 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.910704 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.923277 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.939692 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.939751 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.939767 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.939792 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.939809 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:09Z","lastTransitionTime":"2025-11-25T14:26:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:09 crc kubenswrapper[4879]: I1125 14:26:09.942563 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:09Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.042290 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.042388 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.042401 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.042420 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.042432 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.144856 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.144916 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.144928 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.144945 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.144956 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
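Every "Failed to update status for pod" record above fails the same way: the kubelet's status PATCH must pass the pod.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743/pod, and that endpoint's serving certificate expired on 2025-08-24T17:21:41Z, about three months before the node's clock reading of 2025-11-25T14:26:09Z. That gap is consistent with a CRC virtual machine resumed long after its certificates lapsed. One way to confirm the expiry from the node is to complete a TLS handshake without verification and read the peer certificate's validity window; the Go sketch below is not part of the log and only the address 127.0.0.1:9743 is taken from the records above.

    // checkcert.go - hedged sketch: print the webhook serving certificate's
    // validity window. Only the address comes from the log; everything else
    // here is an illustrative assumption.
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Skip verification so the handshake succeeds even though the
        // certificate is expired; the point is to inspect it, not trust it.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial webhook endpoint: %v", err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:    %s\n", cert.Subject)
        fmt.Printf("not before: %s\n", cert.NotBefore.Format(time.RFC3339))
        fmt.Printf("not after:  %s\n", cert.NotAfter.Format(time.RFC3339))
        fmt.Printf("expired:    %v\n", time.Now().After(cert.NotAfter))
    }

The "Internal error occurred: failed calling webhook" wrapping suggests the webhook rejects rather than tolerates the outage, so every status patch keeps failing with the same x509 error until the certificate is rotated.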
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.251787 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.251841 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.251856 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.251878 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.251894 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.354175 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.354426 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.354491 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.354556 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.354613 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.459481 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.459528 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.459544 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.459558 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.459572 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
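The repeated kubelet_node_status.go:724 / setters.go:603 groups above are the kubelet re-recording the same five node events and re-setting the same Ready=False condition on every sync; only the microsecond timestamps change. The condition={...} payload is plain JSON, so when sifting these logs it can be pulled apart mechanically. A minimal sketch follows; the nodeCondition struct is a hypothetical string-typed mirror of the visible fields, not the real Kubernetes NodeCondition type, and the payload is copied verbatim from one of the records above.

    // parsecond.go - hedged sketch: decode the condition payload from a
    // setters.go:603 record.
    package main

    import (
        "encoding/json"
        "fmt"
        "log"
    )

    // nodeCondition mirrors only the fields visible in the log payloads.
    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        // Payload copied from one of the "Node became not ready" records above.
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
    }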
Has your network provider started?"} Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.562090 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.562165 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.562180 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.562198 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.562210 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.644667 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:10 crc kubenswrapper[4879]: E1125 14:26:10.644845 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.665197 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.665232 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.665241 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.665256 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:10 crc kubenswrapper[4879]: I1125 14:26:10.665266 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:10Z","lastTransitionTime":"2025-11-25T14:26:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 25 14:26:11 crc kubenswrapper[4879]: I1125 14:26:11.643992 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:11 crc kubenswrapper[4879]: I1125 14:26:11.644086 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:11 crc kubenswrapper[4879]: I1125 14:26:11.643988 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:11 crc kubenswrapper[4879]: E1125 14:26:11.644256 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:11 crc kubenswrapper[4879]: E1125 14:26:11.644183 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:11 crc kubenswrapper[4879]: E1125 14:26:11.644378 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:12 crc kubenswrapper[4879]: I1125 14:26:12.643578 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:12 crc kubenswrapper[4879]: E1125 14:26:12.643710 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:13 crc kubenswrapper[4879]: I1125 14:26:13.643832 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:13 crc kubenswrapper[4879]: I1125 14:26:13.643856 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:13 crc kubenswrapper[4879]: I1125 14:26:13.643909 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:13 crc kubenswrapper[4879]: E1125 14:26:13.644033 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:13 crc kubenswrapper[4879]: E1125 14:26:13.644253 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:13 crc kubenswrapper[4879]: E1125 14:26:13.644368 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:14 crc kubenswrapper[4879]: I1125 14:26:14.644386 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:14 crc kubenswrapper[4879]: E1125 14:26:14.644530 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:14 crc kubenswrapper[4879]: I1125 14:26:14.645167 4879 scope.go:117] "RemoveContainer" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698"
Nov 25 14:26:14 crc kubenswrapper[4879]: E1125 14:26:14.645362 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9"
Has your network provider started?"}
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.491045 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.491085 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.491098 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.491114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.491147 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:15Z","lastTransitionTime":"2025-11-25T14:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.593986 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.594028 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.594038 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.594055 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.594067 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:15Z","lastTransitionTime":"2025-11-25T14:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.644662 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.644662 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.644777 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:15 crc kubenswrapper[4879]: E1125 14:26:15.644917 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
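Every "network is not ready" sync error in this stretch has the same root cause: kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/, so it refuses to create sandboxes for pods that need pod networking until ovn-kubernetes (itself crash-looping above) writes its config back. A hedged sketch of the equivalent readiness check, assuming only that CNI config loaders accept .conf, .conflist and .json files; kubelet's real logic lives in its CRI network code:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	const dir = "/etc/kubernetes/cni/net.d"
    	var found []string
    	// Scan for the file extensions CNI config loaders pick up.
    	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
    		matches, _ := filepath.Glob(filepath.Join(dir, pat))
    		found = append(found, matches...)
    	}
    	if len(found) == 0 {
    		fmt.Fprintf(os.Stderr, "no CNI configuration file in %s/. Has your network provider started?\n", dir)
    		os.Exit(1)
    	}
    	fmt.Println("CNI config present:", found)
    }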
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:15 crc kubenswrapper[4879]: E1125 14:26:15.644988 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:15 crc kubenswrapper[4879]: E1125 14:26:15.645061 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.696800 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.696840 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.696851 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.696895 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.696907 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:15Z","lastTransitionTime":"2025-11-25T14:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.800028 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.800065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.800076 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.800091 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.800100 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:15Z","lastTransitionTime":"2025-11-25T14:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.902355 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.902402 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.902415 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.902436 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:15 crc kubenswrapper[4879]: I1125 14:26:15.902451 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:15Z","lastTransitionTime":"2025-11-25T14:26:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.004109 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.004160 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.004168 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.004180 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.004189 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.106587 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.106695 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.106705 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.106751 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.106763 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.209165 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.209217 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.209231 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.209249 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.209261 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.311506 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.311533 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.311541 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.311554 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.311562 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.414671 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.414733 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.414756 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.414785 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.414809 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.517168 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.517210 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.517227 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.517244 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.517255 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.619767 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.619831 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.619842 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.619894 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.619905 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.644693 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:16 crc kubenswrapper[4879]: E1125 14:26:16.644877 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.722722 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.722753 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.722761 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.722774 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.722783 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.824595 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.824659 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.824677 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.824700 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.824715 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.929079 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.929135 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.929145 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.929163 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:16 crc kubenswrapper[4879]: I1125 14:26:16.929172 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:16Z","lastTransitionTime":"2025-11-25T14:26:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.031993 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.032052 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.032068 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.032091 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.032108 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.133857 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.133930 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.133942 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.133959 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.133971 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.236472 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.236516 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.236529 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.236546 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.236559 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.338713 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.338751 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.338762 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.338778 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.338790 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.441333 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.441903 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.441927 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.441946 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.441959 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.444790 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:17 crc kubenswrapper[4879]: E1125 14:26:17.444945 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 14:26:17 crc kubenswrapper[4879]: E1125 14:26:17.445017 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:26:49.445000064 +0000 UTC m=+101.048413135 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.545006 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.545042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.545051 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.545065 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.545077 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.644189 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.644186 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:17 crc kubenswrapper[4879]: E1125 14:26:17.644321 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.644189 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
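Two things show in the metrics-certs failure above. The "not registered" error means the secret is absent from kubelet's object cache (either it does not exist yet or the pod's sources have not synced), and "durationBeforeRetry 32s" is the exponential backoff for failed volume operations, which as far as I can tell doubles from 500ms, so 32s corresponds to the seventh consecutive failure (0.5s, 1s, 2s, 4s, 8s, 16s, 32s). One way to rule out the missing-object case from outside the node is to ask the API server directly; a hedged client-go sketch, where the kubeconfig path is an assumption for illustration:

    package main

    import (
    	"context"
    	"fmt"
    	"log"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Kubeconfig path is illustrative; adjust for your environment.
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
    	if err != nil {
    		log.Fatal(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		log.Fatal(err)
    	}
    	s, err := cs.CoreV1().Secrets("openshift-multus").Get(context.TODO(), "metrics-daemon-secret", metav1.GetOptions{})
    	if err != nil {
    		log.Fatalf("secret missing or unreadable: %v", err)
    	}
    	fmt.Printf("secret %s/%s present with %d data keys\n", s.Namespace, s.Name, len(s.Data))
    }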
Nov 25 14:26:17 crc kubenswrapper[4879]: E1125 14:26:17.644491 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:17 crc kubenswrapper[4879]: E1125 14:26:17.644633 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.649096 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.649171 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.649194 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.649217 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.649231 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.751820 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.751859 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.751868 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.751881 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.751890 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.853393 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.853432 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.853440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.853454 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.853463 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.955867 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.955948 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.955960 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.955979 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:17 crc kubenswrapper[4879]: I1125 14:26:17.955993 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:17Z","lastTransitionTime":"2025-11-25T14:26:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.021811 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.021858 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.021876 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.021893 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.021904 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.038992 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:18Z is after 2025-08-24T17:21:41Z"
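The patch payload itself is unremarkable; the update is rejected because the node-status write has to pass the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743, and that endpoint is serving a certificate that expired on 2025-08-24, three months before the node's clock, which is consistent with resuming a long-suspended CRC VM. A quick probe of the endpoint's certificate dates (hedged sketch; InsecureSkipVerify is deliberate so the handshake completes and the expired certificate can be inspected at all):

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"log"
    	"time"
    )

    func main() {
    	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
    		InsecureSkipVerify: true, // verification is expected to fail; we only want the dates
    	})
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close()
    	cert := conn.ConnectionState().PeerCertificates[0]
    	fmt.Printf("notBefore=%s notAfter=%s expiredNow=%v\n",
    		cert.NotBefore.Format(time.RFC3339),
    		cert.NotAfter.Format(time.RFC3339),
    		time.Now().After(cert.NotAfter))
    }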
Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.042483 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.042509 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.042517 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.042531 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.042542 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.062024 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:18Z is after 
2025-08-24T17:21:41Z" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.064847 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.064902 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.064915 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.064933 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.064945 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.084959 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.088805 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.088861 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.088877 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.088899 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.088915 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.100236 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.103822 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.103883 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.103895 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.103913 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.103926 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.116199 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.116371 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.117973 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.118015 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.118026 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.118043 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.118056 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.220096 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.220155 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.220168 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.220183 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.220195 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.321956 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.321995 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.322007 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.322037 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.322051 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.424043 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.424078 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.424087 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.424099 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.424108 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.526651 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.526800 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.526818 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.526939 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.526951 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.629192 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.629232 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.629243 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.629259 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.629270 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.643740 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:18 crc kubenswrapper[4879]: E1125 14:26:18.643870 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.731406 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.731447 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.731457 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.731475 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.731489 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.833243 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.833272 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.833283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.833298 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.833309 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.934934 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.934982 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.934993 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.935010 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:18 crc kubenswrapper[4879]: I1125 14:26:18.935023 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:18Z","lastTransitionTime":"2025-11-25T14:26:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.037425 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.037464 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.037474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.037488 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.037497 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.139502 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.139538 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.139547 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.139563 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.139572 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.241994 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.242042 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.242052 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.242066 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.242076 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.344875 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.344930 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.344946 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.344966 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.344976 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.447843 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.447875 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.447884 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.447899 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.447909 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.551255 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.551303 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.551313 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.551336 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.551353 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.644659 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.644702 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.644657 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:19 crc kubenswrapper[4879]: E1125 14:26:19.644778 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:19 crc kubenswrapper[4879]: E1125 14:26:19.644825 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:19 crc kubenswrapper[4879]: E1125 14:26:19.644948 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.653185 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.653218 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.653229 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.653245 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.653257 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.654702 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.666292 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.677262 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.687365 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.698595 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.715520 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73
d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.726996 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.738840 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.750537 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.755393 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.755426 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.755436 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.755461 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 
14:26:19.755470 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.762087 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.771958 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.783334 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.793362 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.806946 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-oper
ator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.816321 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.825753 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.838944 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.857951 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.857979 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.857987 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.858003 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.858012 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.862083 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-
11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:19Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.960807 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.960864 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.960876 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.960895 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:19 crc kubenswrapper[4879]: I1125 14:26:19.960906 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:19Z","lastTransitionTime":"2025-11-25T14:26:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.063900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.063933 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.063941 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.063952 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.063961 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.166517 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.166725 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.166845 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.166941 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.167024 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.269491 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.269755 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.269865 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.269958 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.270033 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.372936 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.373195 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.373335 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.373462 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.373574 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.475659 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.475940 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.476023 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.476113 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.476239 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.578283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.578559 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.578656 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.578746 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.578827 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.643962 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:20 crc kubenswrapper[4879]: E1125 14:26:20.644079 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.680824 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.680863 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.680871 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.680885 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.680893 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.782833 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.782874 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.782883 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.782897 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.782906 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.884905 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.884939 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.884948 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.884962 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.884970 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.987330 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.987401 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.987555 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.987610 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:20 crc kubenswrapper[4879]: I1125 14:26:20.987634 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:20Z","lastTransitionTime":"2025-11-25T14:26:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.068249 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/0.log" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.068305 4879 generic.go:334] "Generic (PLEG): container finished" podID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" containerID="9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3" exitCode=1 Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.068336 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerDied","Data":"9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.068729 4879 scope.go:117] "RemoveContainer" containerID="9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.089486 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.090770 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.090801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.090813 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.090829 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.090840 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.102900 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.115072 4879 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.127357 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:21Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.138870 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.156075 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e
6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.165681 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.176598 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.190901 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.193521 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.193546 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.193573 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.193586 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.193596 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.213377 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-
11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.223901 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.235204 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.246641 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.257683 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.267906 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.285103 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73
d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295373 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295634 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295652 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295661 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295674 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.295683 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.310636 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:21Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.398364 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.398398 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.398407 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.398422 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.398431 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.500927 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.500965 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.500985 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.501002 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.501014 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.603195 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.603341 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.603355 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.603369 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.603379 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.644254 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:21 crc kubenswrapper[4879]: E1125 14:26:21.644447 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.644552 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.644764 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:21 crc kubenswrapper[4879]: E1125 14:26:21.644777 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:21 crc kubenswrapper[4879]: E1125 14:26:21.644986 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.706340 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.706385 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.706401 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.706424 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.706441 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.809368 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.809402 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.809413 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.809430 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.809443 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.912220 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.912259 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.912268 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.912283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:21 crc kubenswrapper[4879]: I1125 14:26:21.912295 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:21Z","lastTransitionTime":"2025-11-25T14:26:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.014705 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.014746 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.014754 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.014770 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.014779 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.073461 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/0.log" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.073519 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerStarted","Data":"127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.088346 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.100537 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.112436 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.116839 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.117000 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.117076 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.117169 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.117250 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.123699 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.151063 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.162181 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.173274 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.186684 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.198241 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.210632 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.220602 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.220630 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.220643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.220660 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.220672 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.225259 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.245903 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.258428 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.268818 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.279786 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.293482 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.306284 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.315857 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:22Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.323283 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.323322 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.323332 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.323345 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.323356 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.426040 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.426089 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.426101 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.426145 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.426161 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.528969 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.529020 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.529032 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.529049 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.529060 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.631505 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.631749 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.631826 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.631898 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.631962 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.643818 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:22 crc kubenswrapper[4879]: E1125 14:26:22.644108 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.733842 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.733879 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.733891 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.733906 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.733916 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.836466 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.836503 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.836513 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.836527 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.836537 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.938806 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.938846 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.938862 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.938877 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:22 crc kubenswrapper[4879]: I1125 14:26:22.938890 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:22Z","lastTransitionTime":"2025-11-25T14:26:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.040938 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.040978 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.040990 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.041006 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.041017 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.143619 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.143665 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.143676 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.143693 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.143703 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.245984 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.246049 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.246064 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.246088 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.246102 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.348859 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.348902 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.348911 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.348925 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.348934 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.452332 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.452432 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.452452 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.452484 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.452504 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.554993 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.555049 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.555062 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.555080 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.555093 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.643717 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:23 crc kubenswrapper[4879]: E1125 14:26:23.643867 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.643748 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.643987 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:23 crc kubenswrapper[4879]: E1125 14:26:23.644027 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:23 crc kubenswrapper[4879]: E1125 14:26:23.644267 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.658570 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.658630 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.658643 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.658664 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.658678 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.760830 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.760875 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.760884 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.760899 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.760941 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.863416 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.863526 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.863542 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.863559 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.863572 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.966530 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.966591 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.966603 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.966624 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:23 crc kubenswrapper[4879]: I1125 14:26:23.966639 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:23Z","lastTransitionTime":"2025-11-25T14:26:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.069976 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.070020 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.070031 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.070048 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.070059 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.173536 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.173594 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.173606 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.173630 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.173643 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.276984 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.277027 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.277036 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.277052 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.277066 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.380376 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.380436 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.380450 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.380474 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.380490 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.484724 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.484796 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.484847 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.484876 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.484895 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.589070 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.589142 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.589152 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.589171 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.589181 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.644274 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:24 crc kubenswrapper[4879]: E1125 14:26:24.644686 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.692991 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.693057 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.693076 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.693107 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.693150 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.796180 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.796251 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.796266 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.796287 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.796305 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.899922 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.900003 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.900019 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.900046 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:24 crc kubenswrapper[4879]: I1125 14:26:24.900062 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:24Z","lastTransitionTime":"2025-11-25T14:26:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.003289 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.003356 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.003368 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.003408 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.003421 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.106330 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.106392 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.106403 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.106422 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.106435 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.209666 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.209738 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.209759 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.209788 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.209810 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.314293 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.314407 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.314437 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.314478 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.314548 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.417869 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.417953 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.417962 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.417981 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.417992 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.521795 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.521852 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.521865 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.521890 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.521903 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.624963 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.625018 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.625031 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.625050 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.625063 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.644618 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.644710 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:25 crc kubenswrapper[4879]: E1125 14:26:25.644759 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.644849 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.645669 4879 scope.go:117] "RemoveContainer" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698" Nov 25 14:26:25 crc kubenswrapper[4879]: E1125 14:26:25.647319 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:25 crc kubenswrapper[4879]: E1125 14:26:25.647409 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.727680 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.727716 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.727729 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.727746 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.727797 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.830067 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.830094 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.830103 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.830115 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.830150 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.932767 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.932806 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.932816 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.932830 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:25 crc kubenswrapper[4879]: I1125 14:26:25.932840 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:25Z","lastTransitionTime":"2025-11-25T14:26:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.035045 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.035104 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.035115 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.035140 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.035149 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.089373 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/2.log" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.091181 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.092232 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.123084 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136646 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136893 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136907 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136914 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136927 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.136936 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.148211 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.157786 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.174788 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.186285 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.195462 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.207188 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.219555 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.233091 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.238809 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.238850 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.238861 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.238876 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.238886 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.246741 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.265371 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.284939 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.301626 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.314464 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.327137 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.339794 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.340865 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.340902 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.340910 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.340924 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.340933 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.350480 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.443554 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.443614 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.443632 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.443657 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.443679 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.545832 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.546223 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.546402 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.546550 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.546677 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.644502 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:26 crc kubenswrapper[4879]: E1125 14:26:26.645039 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.649814 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.649882 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.649900 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.649926 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.649942 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.753066 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.753114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.753151 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.753174 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.753188 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.856686 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.856766 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.856792 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.856839 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.856862 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.965204 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.965244 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.965253 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.965268 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:26 crc kubenswrapper[4879]: I1125 14:26:26.965277 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:26Z","lastTransitionTime":"2025-11-25T14:26:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.069877 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.069915 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.069928 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.069944 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.069956 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.100417 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/3.log" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.101591 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/2.log" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.104704 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" exitCode=1 Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.104801 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.104841 4879 scope.go:117] "RemoveContainer" containerID="fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.106722 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:26:27 crc kubenswrapper[4879]: E1125 14:26:27.107050 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.121549 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.131733 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.144704 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.158505 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.171572 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.172815 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.172893 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.172909 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.172928 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.172940 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.182660 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.201968 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc40d2ea7e9a6b049e6d7b127eb690beb7614e73d721af7e8de8bd69abd16698\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:25:58Z\\\",\\\"message\\\":\\\"LBGroup\\\\\\\"}}}\\\\nI1125 14:25:58.621747 6556 services_controller.go:452] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics per-node LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621754 6556 services_controller.go:453] Built service openshift-operator-lifecycle-manager/catalog-operator-metrics template LB for network=default: []services.LB{}\\\\nI1125 14:25:58.621760 6556 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1125 14:25:58.621771 6556 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-operator-lifecycle-manager/catalog-operator-metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"78f6184b-c7cf-436d-8cbb-4b31f8af75e8\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-operator-lifecycle-manager/catalog-operator-metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:27Z\\\",\\\"message\\\":\\\" all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1125 14:26:26.498678 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-48cv4\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-64t7t\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-g7f9p\\\\nF1125 14:26:26.498692 6917 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because 
it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z]\\\\nI1125 14:26:26.498701 6917 obj_retry.go:365] Adding new object: *v1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:26:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mou
ntPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.212784 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.222745 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.234753 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.246565 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.261050 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.272845 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 
14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.276296 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.276327 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.276337 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.276353 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.276361 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.295027 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.308195 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.317916 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.328355 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.342199 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:27Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.378411 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.378446 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.378455 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.378468 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.378477 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.480963 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.481021 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.481036 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.481057 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.481070 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.583291 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.583328 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.583337 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.583350 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.583359 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.644676 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.644688 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.644922 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:27 crc kubenswrapper[4879]: E1125 14:26:27.645000 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:27 crc kubenswrapper[4879]: E1125 14:26:27.644870 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:27 crc kubenswrapper[4879]: E1125 14:26:27.645218 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.687188 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.687251 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.687270 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.687292 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.687310 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.790148 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.790194 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.790211 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.790231 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.790248 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.892674 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.892712 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.892725 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.892741 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.892753 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.995479 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.995641 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.995655 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.995671 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:27 crc kubenswrapper[4879]: I1125 14:26:27.995681 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:27Z","lastTransitionTime":"2025-11-25T14:26:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.098780 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.099207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.099422 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.099634 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.099842 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.108928 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/3.log" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.112476 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.112625 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.124704 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.136685 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.150195 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.161674 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.183933 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50ca
a30f0e7ef1656dbf5e72ae99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:27Z\\\",\\\"message\\\":\\\" all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1125 14:26:26.498678 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-48cv4\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-64t7t\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-g7f9p\\\\nF1125 14:26:26.498692 6917 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z]\\\\nI1125 14:26:26.498701 6917 obj_retry.go:365] Adding new object: *v1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:26:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.197569 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.202406 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.202479 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.202492 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.202508 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.202520 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.211038 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.226608 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.241416 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc 
kubenswrapper[4879]: I1125 14:26:28.257196 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.272696 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 
14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.302872 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.305496 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.305560 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.305580 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.305605 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.305625 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.321059 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.333031 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.333085 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.333100 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.333139 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.333156 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.335888 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.348606 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.350678 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.352925 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.353002 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.353021 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.353054 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.353073 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.365808 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.366720 4879 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329b
a568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.370512 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.370559 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.370577 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.370600 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.370612 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.379105 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.384819 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"6
13b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.389494 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.389548 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.389561 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.389582 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.389593 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.392201 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.404339 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.410534 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.410598 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.410618 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.410644 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.410666 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.424405 4879 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fb0103eb-cf5f-48db-9ca5-77a21e79fac0\\\",\\\"systemUUID\\\":\\\"613b3c94-49dd-4904-bdaf-1b6a10ba13f7\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:28Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.424573 4879 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.426740 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.426794 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.426806 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.426825 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.426837 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.529180 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.529239 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.529251 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.529270 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.529283 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.632405 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.632480 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.632491 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.632506 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.632520 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.644118 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:28 crc kubenswrapper[4879]: E1125 14:26:28.644553 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.734911 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.734968 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.734988 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.735013 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.735031 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.838105 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.838170 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.838186 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.838207 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.838223 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.941060 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.941149 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.941170 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.941190 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:28 crc kubenswrapper[4879]: I1125 14:26:28.941199 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:28Z","lastTransitionTime":"2025-11-25T14:26:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.044687 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.044751 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.044768 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.044792 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.044809 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.148744 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.148803 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.148820 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.148847 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.148868 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.251699 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.251743 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.251757 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.251774 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.251786 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.354202 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.354263 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.354280 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.354301 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.354314 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.457357 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.457838 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.457854 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.457869 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.457880 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.560583 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.560922 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.560940 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.560957 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.560972 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.644054 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:29 crc kubenswrapper[4879]: E1125 14:26:29.644440 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.644210 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:29 crc kubenswrapper[4879]: E1125 14:26:29.644711 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.644189 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:29 crc kubenswrapper[4879]: E1125 14:26:29.644896 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.657639 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e206572db87afabcc2a44a23ddd899215c6c0f9d5dacecfb67da43581d76bb4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.663584 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.663623 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.663634 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.663747 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.663769 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.667500 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c8p75" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6837743-8dfe-448b-988a-54e78cdd5c57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://948bef17e63f5de2ad9bd9d2ff2cab0de1b3fe71fb485ac2063556f29131a81e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsbjn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:34Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c8p75\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.678712 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ecb034ac-0ac4-4fa8-a14c-500728bcab67\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb2bc3afdbcc5905f6836fc469296918036b57940fab50d9e76c637221f685bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff106a60ef9876d85f1476d189dca335e9a0cb849ba75f0f66dca34773a08bd2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db7ba869f14cb2fe4c67cf6696274a1d90161fc717a8a5aed05b0649e6a650\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.690494 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.702162 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.713169 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.729278 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5301133b-1830-45bc-a55e-7c3e97907bb9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50ca
a30f0e7ef1656dbf5e72ae99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:27Z\\\",\\\"message\\\":\\\" all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1125 14:26:26.498678 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-48cv4\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-64t7t\\\\nI1125 14:26:26.498687 6917 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-g7f9p\\\\nF1125 14:26:26.498692 6917 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:26Z is after 2025-08-24T17:21:41Z]\\\\nI1125 14:26:26.498701 6917 obj_retry.go:365] Adding new object: *v1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:26:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7zrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-g7f9p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.740272 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-48cv4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"960966b7-77d2-49d8-bfcc-2aa44e032f8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kg26z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:45Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-48cv4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.755287 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf1495a-78dc-4040-b265-01d6443c8584\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ea3664295645a2ffb80138d7c0b05284a3ac02ef573539276a5db8e9131ac73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0091e7752262af0e6b16f964e0cf510e3fb96c83b1f304a0685fb85ff3aaafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fbbb489f836f991c95ec5e07f3952d8439a2996d39a36a84ab84c663278f7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae550bcaebb1518f93013d0f7fa35bbf3525bd468eb6592a1304d9ccdc2f8ada\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.766394 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.766428 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.766440 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.766452 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.766461 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.768083 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://87818c644bd8b2e172ba70c5c8ddb0a559ff185dd1ff4f1e233c164d8123ba11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9acc2a90315bfdb4f7dd641cc250ef56316af2e111da7e7e0910893c28c25aec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.802213 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dd90ce48277da3e956899c07f1f8c40966471fbcb9a5fdb7e4cc90a335743ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.825979 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8m8g8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f1eafdec-4c5a-4e91-97b4-a117c35838d4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T14:26:20Z\\\",\\\"message\\\":\\\"2025-11-25T14:25:34+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c\\\\n2025-11-25T14:25:34+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_28402e8a-1520-47ae-9f90-ed2b9578a64c to /host/opt/cni/bin/\\\\n2025-11-25T14:25:35Z [verbose] multus-daemon started\\\\n2025-11-25T14:25:35Z [verbose] Readiness Indicator file check\\\\n2025-11-25T14:26:20Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:26:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-httx4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8m8g8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.836828 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e92a55f6-51a0-4f4f-b5eb-594867b91b4a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7fc34581c597724effb4cf21ed3720f3693f4721ccc8f9ccb73e817e728625\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77368a187b284c2ac70db6e309d49205edb1fe52fb3f064fd6761edad60b1d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cl78c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wf76s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 
14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.855308 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f622441a-ea10-4164-9bc8-1dc26af90bca\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2344f9bbb56ef783c3913327e9afd9933a3c8b907af91349d10261e5fe925ed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://491d3e7e778bf8dab2887546b27c90697e7501bea533c71c3549cb0377f9f624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc84e59d77b7ace3577b6c1ea49e72b4ee6fb25cf7eab08bfd836fbb32856818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7be959c844c9c58aae8cf936513c8b24fd755308d07b2d2d75956edd4fd905ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dba41613872c07ad1a039ee28193fbd2e0b94790cd11903ef1a2c8e4dd4535ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e37677fbe47745cd3ca7cceb0be0464de632cf1101b3378d6ae132976b75882a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a99f48774d414d2ddd2c8b6d249f3475d0f9a45a53078b60bd5bc0318964151\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:12Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baf40f483eb7200417a968076f0edb8d48dcbdded376b6fba286faa7336630f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.867986 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a357275d-a98b-4ba6-8649-6367ecb5401f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:26:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T14:25:30Z\\\",\\\"message\\\":\\\"nt-ca-file\\\\nI1125 14:25:30.964873 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 14:25:30.964921 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 14:25:30.964936 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 14:25:30.964967 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 14:25:30.964977 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1125 14:25:30.965087 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764080716\\\\\\\\\\\\\\\" (2025-11-25 14:25:15 +0000 UTC to 2025-12-25 14:25:16 +0000 UTC (now=2025-11-25 14:25:30.965058975 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965222 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764080730\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764080730\\\\\\\\\\\\\\\" (2025-11-25 13:25:30 +0000 UTC to 2026-11-25 13:25:30 +0000 UTC (now=2025-11-25 14:25:30.96520814 +0000 UTC))\\\\\\\"\\\\nI1125 14:25:30.965242 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 14:25:30.965257 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 14:25:30.965272 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3909593243/tls.crt::/tmp/serving-cert-3909593243/tls.key\\\\\\\"\\\\nI1125 14:25:30.965299 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF1125 14:25:30.965610 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:13Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:09Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.869278 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.869338 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.869348 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.869363 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.869373 4879 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.877277 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gnxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bcc19ab6-d0dc-465d-90c3-1599a3b4bba0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd96aec246b1dd6db78f2bb87003ec81b160acbe7b8b7056d7600e36d9c683e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mx5l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gnxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.887320 4879 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1f8529f4-b6ae-4467-ad94-67b1113f9d6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d75cb9810bd4248bf74e96150b115ec218b77f58ced25e3cad459e8c663816b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dhfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-64t7t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.903246 4879 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac63f434-7d8f-471d-8f1e-e1dc48ecb71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T14:25:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9dd64274706d4009316befa5fca4aef2b08012408ce2ec85edb3c7b60388a8c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T14:25:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f89e8f757443f136a857b007a5d89e4a8e54252e7f90ac6abbccb1e7d863cdd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c582f1b013f7b8af31eaa3403eb744e34e5d717ab3e8eb899c4c03138ea93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://571a7f5d29111b12060efa0bbe1e8d99b8b7e2f208b3a90084558d68d904661c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d558c1813b098550fc578f99c94775726d0e1baa38dce9249739f6b37d38885\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa5b79d5029254b4c4ac39cddc8aad0aa018694cc0fc98fa36e9e2ae8ad69771\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e4eeb8a15454e841f33f1b944b54a7b6440725b733be13fc8f9af6a2998aa51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T14:25:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T14:25:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bv4fz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T14:25:32Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-4xdgc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T14:26:29Z is after 2025-08-24T17:21:41Z" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.971563 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.971594 4879 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.971603 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.971616 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:29 crc kubenswrapper[4879]: I1125 14:26:29.971624 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:29Z","lastTransitionTime":"2025-11-25T14:26:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.073497 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.073615 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.073628 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.073646 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.073657 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.176072 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.176111 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.176321 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.176336 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.176345 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.278699 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.278757 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.278772 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.278796 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.278812 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.382669 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.382730 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.382749 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.382776 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.382795 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.485533 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.485576 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.485588 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.485604 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.485615 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.587715 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.587753 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.587764 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.587777 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.587787 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.644672 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:30 crc kubenswrapper[4879]: E1125 14:26:30.644798 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.690071 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.690114 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.690150 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.690171 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.690198 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.792683 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.792733 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.792748 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.792770 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.792785 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.894794 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.894827 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.894835 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.894850 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.894858 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.998281 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.998335 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.998350 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.998371 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:30 crc kubenswrapper[4879]: I1125 14:26:30.998387 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:30Z","lastTransitionTime":"2025-11-25T14:26:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.100280 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.100338 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.100348 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.100364 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.100376 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.203653 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.203708 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.203721 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.203737 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.203747 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.308731 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.308775 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.308785 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.308801 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.308811 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.412511 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.412555 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.412564 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.412581 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.412593 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.515053 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.515115 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.515149 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.515167 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.515180 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.618325 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.618359 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.618368 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.618384 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.618393 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.644210 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.644285 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:31 crc kubenswrapper[4879]: E1125 14:26:31.644370 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:31 crc kubenswrapper[4879]: E1125 14:26:31.644418 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.644886 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:31 crc kubenswrapper[4879]: E1125 14:26:31.645158 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.721011 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.721069 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.721081 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.721103 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 14:26:31 crc kubenswrapper[4879]: I1125 14:26:31.721116 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:31Z","lastTransitionTime":"2025-11-25T14:26:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.030456 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.030494 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.030507 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.030523 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.030534 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:32Z","lastTransitionTime":"2025-11-25T14:26:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:32 crc kubenswrapper[4879]: I1125 14:26:32.643933 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:32 crc kubenswrapper[4879]: E1125 14:26:32.644061 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.061693 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.061738 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.061756 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.061779 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.061791 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:33Z","lastTransitionTime":"2025-11-25T14:26:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.643796 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:33 crc kubenswrapper[4879]: E1125 14:26:33.643938 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.643824 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:33 crc kubenswrapper[4879]: I1125 14:26:33.643805 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:33 crc kubenswrapper[4879]: E1125 14:26:33.644191 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:33 crc kubenswrapper[4879]: E1125 14:26:33.644245 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.092748 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.092795 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.092806 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.092821 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.092832 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:34Z","lastTransitionTime":"2025-11-25T14:26:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:34 crc kubenswrapper[4879]: I1125 14:26:34.644050 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:34 crc kubenswrapper[4879]: E1125 14:26:34.644217 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.023831 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.023867 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.023875 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.023888 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.023898 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.230961 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.231037 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.231053 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.231078 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.231096 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.335041 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.335098 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.335106 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.335124 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.335154 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.436292 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.436597 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.436535536 +0000 UTC m=+151.039948657 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.438237 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.438391 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.438490 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.438594 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.438683 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.538270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.538798 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.538983 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.538515 4879 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.539177 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539251 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539513 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.538989 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539566 4879 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539624 4879 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539675 4879 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539331 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.539293037 +0000 UTC m=+151.142706108 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539834 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.53979273 +0000 UTC m=+151.143205981 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.539861 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.539849372 +0000 UTC m=+151.143262473 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.540282 4879 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.540345 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.540333545 +0000 UTC m=+151.143746826 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.541610 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.541654 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.541775 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.541812 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.541831 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.643719 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.643793 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.643880 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.643888 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.643992 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:35 crc kubenswrapper[4879]: E1125 14:26:35.644246 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.645003 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.645034 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.645043 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.645057 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 14:26:35 crc kubenswrapper[4879]: I1125 14:26:35.645073 4879 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T14:26:35Z","lastTransitionTime":"2025-11-25T14:26:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 14:26:36 crc kubenswrapper[4879]: I1125 14:26:36.644646 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:36 crc kubenswrapper[4879]: E1125 14:26:36.644912 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[repeated block elided: the node-status block recurs at roughly 100 ms intervals, ten times, from 14:26:36.672 through 14:26:37.604.]
Nov 25 14:26:37 crc kubenswrapper[4879]: I1125 14:26:37.645461 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:37 crc kubenswrapper[4879]: I1125 14:26:37.645515 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:37 crc kubenswrapper[4879]: E1125 14:26:37.645693 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:37 crc kubenswrapper[4879]: I1125 14:26:37.645732 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:37 crc kubenswrapper[4879]: E1125 14:26:37.645971 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:37 crc kubenswrapper[4879]: E1125 14:26:37.646093 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
[repeated block elided: the node-status block recurs at roughly 100 ms intervals, ten times, from 14:26:37.706 through 14:26:38.580.]
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.630951 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"]
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.631346 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.634161 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.634364 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.635254 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.635328 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.644571 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:38 crc kubenswrapper[4879]: E1125 14:26:38.644788 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.657973 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.688083 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.688195 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.688224 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.688246 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.688268 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
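[editor's note: the reconciler entries above, and the MountVolume entries a few lines below, show the kubelet volume manager's two-phase pattern: each volume in the pod's desired state is first verified as attached, then mounted, after which "MountVolume.SetUp succeeded" is logged. A toy Go sketch of that desired-vs-actual loop; the types and function names are illustrative assumptions, not kubelet's real ones:]

    package main

    import "fmt"

    // volumeState is a stand-in for the actual-state-of-world cache:
    // volume name -> already mounted.
    type volumeState map[string]bool

    // reconcile walks the desired state and, for each volume not yet in
    // the actual state, logs the verify and mount phases in the order
    // seen in the log above.
    func reconcile(desired []string, actual volumeState) {
    	for _, v := range desired {
    		if actual[v] {
    			continue // already mounted, nothing to do
    		}
    		fmt.Printf("operationExecutor.VerifyControllerAttachedVolume started for volume %q\n", v)
    		fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
    		actual[v] = true // MountVolume.SetUp succeeded
    	}
    }

    func main() {
    	reconcile(
    		[]string{"etc-cvo-updatepayloads", "etc-ssl-certs", "serving-cert", "kube-api-access", "service-ca"},
    		volumeState{},
    	)
    }

[Note how the earlier mount failures for network-check-target-xd92c hit the opposite path: SetUp fails because the backing ConfigMap/Secret objects are "not registered" in the kubelet's caches, so the operation is re-queued with backoff instead of marking the volume mounted.]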
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.712073 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=64.712051427 podStartE2EDuration="1m4.712051427s" podCreationTimestamp="2025-11-25 14:25:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.700937089 +0000 UTC m=+90.304350160" watchObservedRunningTime="2025-11-25 14:26:38.712051427 +0000 UTC m=+90.315464498"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.768272 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8m8g8" podStartSLOduration=67.768253422 podStartE2EDuration="1m7.768253422s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.756064407 +0000 UTC m=+90.359477488" watchObservedRunningTime="2025-11-25 14:26:38.768253422 +0000 UTC m=+90.371666493"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.768449 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wf76s" podStartSLOduration=66.768445687 podStartE2EDuration="1m6.768445687s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.768386585 +0000 UTC m=+90.371799656" watchObservedRunningTime="2025-11-25 14:26:38.768445687 +0000 UTC m=+90.371858758"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.782681 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=41.782652006 podStartE2EDuration="41.782652006s" podCreationTimestamp="2025-11-25 14:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.782257255 +0000 UTC m=+90.385670336" watchObservedRunningTime="2025-11-25 14:26:38.782652006 +0000 UTC m=+90.386065077"
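[editor's note: a Go sketch of the arithmetic behind the pod_startup_latency_tracker lines above. That the SLO duration excludes image-pull time is an assumption about the tracker's intent; in this log the pull timestamps are the zero time ("0001-01-01 ..."), i.e. no pull was observed, which is why podStartSLOduration and podStartE2EDuration agree on every line:]

    package main

    import (
    	"fmt"
    	"time"
    )

    // startupDurations derives the two durations reported per pod:
    // e2e is observedRunningTime - podCreationTimestamp; the SLO
    // duration additionally subtracts time spent pulling images
    // (assumed behavior) when pull timestamps were recorded.
    func startupDurations(created, firstPull, lastPull, running time.Time) (slo, e2e time.Duration) {
    	e2e = running.Sub(created)
    	slo = e2e
    	if !firstPull.IsZero() && !lastPull.IsZero() {
    		slo -= lastPull.Sub(firstPull) // exclude image-pull time
    	}
    	return slo, e2e
    }

    func main() {
    	// Numbers from the kube-controller-manager-crc line above.
    	created := time.Date(2025, 11, 25, 14, 25, 34, 0, time.UTC)
    	running := created.Add(64*time.Second + 712051427*time.Nanosecond)
    	slo, e2e := startupDurations(created, time.Time{}, time.Time{}, running)
    	fmt.Println(slo, e2e) // 1m4.712051427s 1m4.712051427s
    }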
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.789813 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.789858 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.789879 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.789968 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.789995 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.791314 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.804111 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.814001 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-ttbqp\" (UID: \"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.859634 4879 
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.859634 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=67.859612079 podStartE2EDuration="1m7.859612079s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.858542321 +0000 UTC m=+90.461955392" watchObservedRunningTime="2025-11-25 14:26:38.859612079 +0000 UTC m=+90.463025150"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.859923 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-4xdgc" podStartSLOduration=67.859919367 podStartE2EDuration="1m7.859919367s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.836463079 +0000 UTC m=+90.439876160" watchObservedRunningTime="2025-11-25 14:26:38.859919367 +0000 UTC m=+90.463332438"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.884644 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=67.884616236 podStartE2EDuration="1m7.884616236s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.874611097 +0000 UTC m=+90.478024168" watchObservedRunningTime="2025-11-25 14:26:38.884616236 +0000 UTC m=+90.488029307"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.898243 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-gnxj7" podStartSLOduration=68.898224009 podStartE2EDuration="1m8.898224009s" podCreationTimestamp="2025-11-25 14:25:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.885503659 +0000 UTC m=+90.488916720" watchObservedRunningTime="2025-11-25 14:26:38.898224009 +0000 UTC m=+90.501637080"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.916556 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podStartSLOduration=67.916530853 podStartE2EDuration="1m7.916530853s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.899310737 +0000 UTC m=+90.502723808" watchObservedRunningTime="2025-11-25 14:26:38.916530853 +0000 UTC m=+90.519943924"
Nov 25 14:26:38 crc kubenswrapper[4879]: I1125 14:26:38.948105 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.144319 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" event={"ID":"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6","Type":"ContainerStarted","Data":"cede2dfdfb449917632c310199425b0b4531c635189a9a6e6795e3e04c8b270b"}
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.144435 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" event={"ID":"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6","Type":"ContainerStarted","Data":"8970725e23c05202ca14bc50e94531a8e0b2687bdd51bf51000e053b23cec476"}
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.159797 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-c8p75" podStartSLOduration=68.159773393 podStartE2EDuration="1m8.159773393s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:38.932140887 +0000 UTC m=+90.535553948" watchObservedRunningTime="2025-11-25 14:26:39.159773393 +0000 UTC m=+90.763186464"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.175210 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-ttbqp" podStartSLOduration=68.175178432 podStartE2EDuration="1m8.175178432s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:39.160844171 +0000 UTC m=+90.764257242" watchObservedRunningTime="2025-11-25 14:26:39.175178432 +0000 UTC m=+90.778591533"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.175395 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.175388528 podStartE2EDuration="1.175388528s" podCreationTimestamp="2025-11-25 14:26:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:26:39.174524015 +0000 UTC m=+90.777937096" watchObservedRunningTime="2025-11-25 14:26:39.175388528 +0000 UTC m=+90.778801639"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.644394 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4"
Nov 25 14:26:39 crc kubenswrapper[4879]: E1125 14:26:39.644716 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.644762 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.644863 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
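[editor's note: the two "SyncLoop (PLEG): event for pod" lines above carry two different Data IDs for the same pod; in PLEG terms the pod sandbox starting is surfaced alongside the actual container, so one ID is plausibly the sandbox and the other the cluster-version-operator container, though which is which is not recoverable from the log. A Go sketch of the event shape; the struct mirrors the log's keys and is an assumption, not kubelet's real type:]

    package main

    import "fmt"

    // PodLifecycleEvent is the shape visible in the log entries above:
    // the pod lifecycle event generator relists container state and
    // emits one event per observed change.
    type PodLifecycleEvent struct {
    	ID   string // pod UID
    	Type string // e.g. "ContainerStarted"
    	Data string // container (or pod sandbox) ID
    }

    func main() {
    	for _, ev := range []PodLifecycleEvent{
    		{"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6", "ContainerStarted", "cede2dfdfb449917632c310199425b0b4531c635189a9a6e6795e3e04c8b270b"},
    		{"d20e4cbb-49d8-4bd0-9ffa-1eb8d22813f6", "ContainerStarted", "8970725e23c05202ca14bc50e94531a8e0b2687bdd51bf51000e053b23cec476"},
    	} {
    		fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %s\n", ev.ID, ev.Type, ev.Data)
    	}
    }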
Nov 25 14:26:39 crc kubenswrapper[4879]: E1125 14:26:39.648310 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 14:26:39 crc kubenswrapper[4879]: E1125 14:26:39.648452 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 14:26:39 crc kubenswrapper[4879]: I1125 14:26:39.649334 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"
Nov 25 14:26:39 crc kubenswrapper[4879]: E1125 14:26:39.649646 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9"
Nov 25 14:26:40 crc kubenswrapper[4879]: I1125 14:26:40.644209 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 14:26:40 crc kubenswrapper[4879]: E1125 14:26:40.644357 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 14:26:41 crc kubenswrapper[4879]: I1125 14:26:41.644143 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 14:26:41 crc kubenswrapper[4879]: E1125 14:26:41.644289 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
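[editor's note: the "back-off 40s restarting failed container=ovnkube-controller" line above reflects the crash-loop delay doubling with each consecutive crash. A Go sketch of that arithmetic; the 10 s base and 5 m cap are the commonly documented kubelet defaults, assumed here rather than read from this log:]

    package main

    import (
    	"fmt"
    	"time"
    )

    // Assumed CrashLoopBackOff defaults: the restart delay doubles per
    // consecutive crash, from crashLoopBase up to crashLoopMax.
    const (
    	crashLoopBase = 10 * time.Second
    	crashLoopMax  = 5 * time.Minute
    )

    func crashLoopDelay(consecutiveCrashes int) time.Duration {
    	d := crashLoopBase
    	for i := 1; i < consecutiveCrashes; i++ {
    		d *= 2
    		if d >= crashLoopMax {
    			return crashLoopMax
    		}
    	}
    	return d
    }

    func main() {
    	// 10s, 20s, 40s, ... so a 40s back-off implies the third
    	// consecutive failed start of ovnkube-controller.
    	fmt.Println(crashLoopDelay(3)) // 40s
    }

[Under those assumptions, ovnkube-controller has already crashed three times in a row, which is consistent with the node still reporting no CNI config at this point in the log.]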
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:41 crc kubenswrapper[4879]: E1125 14:26:41.644548 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:41 crc kubenswrapper[4879]: I1125 14:26:41.644676 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:41 crc kubenswrapper[4879]: E1125 14:26:41.644733 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:42 crc kubenswrapper[4879]: I1125 14:26:42.644175 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:42 crc kubenswrapper[4879]: E1125 14:26:42.644901 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:43 crc kubenswrapper[4879]: I1125 14:26:43.644413 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:43 crc kubenswrapper[4879]: I1125 14:26:43.644446 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:43 crc kubenswrapper[4879]: E1125 14:26:43.644562 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:43 crc kubenswrapper[4879]: E1125 14:26:43.644664 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:43 crc kubenswrapper[4879]: I1125 14:26:43.645288 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:43 crc kubenswrapper[4879]: E1125 14:26:43.645454 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:44 crc kubenswrapper[4879]: I1125 14:26:44.643664 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:44 crc kubenswrapper[4879]: E1125 14:26:44.643891 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:45 crc kubenswrapper[4879]: I1125 14:26:45.643820 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:45 crc kubenswrapper[4879]: I1125 14:26:45.643890 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:45 crc kubenswrapper[4879]: I1125 14:26:45.644109 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:45 crc kubenswrapper[4879]: E1125 14:26:45.644253 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:45 crc kubenswrapper[4879]: E1125 14:26:45.644330 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:45 crc kubenswrapper[4879]: E1125 14:26:45.644409 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:46 crc kubenswrapper[4879]: I1125 14:26:46.644429 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:46 crc kubenswrapper[4879]: E1125 14:26:46.644579 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:47 crc kubenswrapper[4879]: I1125 14:26:47.644455 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:47 crc kubenswrapper[4879]: I1125 14:26:47.644478 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:47 crc kubenswrapper[4879]: E1125 14:26:47.644656 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:47 crc kubenswrapper[4879]: I1125 14:26:47.644703 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:47 crc kubenswrapper[4879]: E1125 14:26:47.644883 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:47 crc kubenswrapper[4879]: E1125 14:26:47.644943 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:48 crc kubenswrapper[4879]: I1125 14:26:48.644594 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:48 crc kubenswrapper[4879]: E1125 14:26:48.644788 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:49 crc kubenswrapper[4879]: I1125 14:26:49.496431 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:49 crc kubenswrapper[4879]: E1125 14:26:49.496673 4879 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:26:49 crc kubenswrapper[4879]: E1125 14:26:49.496758 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs podName:960966b7-77d2-49d8-bfcc-2aa44e032f8c nodeName:}" failed. No retries permitted until 2025-11-25 14:27:53.496733709 +0000 UTC m=+165.100146790 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs") pod "network-metrics-daemon-48cv4" (UID: "960966b7-77d2-49d8-bfcc-2aa44e032f8c") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 14:26:49 crc kubenswrapper[4879]: I1125 14:26:49.643649 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:49 crc kubenswrapper[4879]: I1125 14:26:49.644563 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:49 crc kubenswrapper[4879]: I1125 14:26:49.644605 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:49 crc kubenswrapper[4879]: E1125 14:26:49.644676 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:49 crc kubenswrapper[4879]: E1125 14:26:49.644550 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:49 crc kubenswrapper[4879]: E1125 14:26:49.644763 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:50 crc kubenswrapper[4879]: I1125 14:26:50.643808 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:50 crc kubenswrapper[4879]: E1125 14:26:50.644776 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:50 crc kubenswrapper[4879]: I1125 14:26:50.645996 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:26:50 crc kubenswrapper[4879]: E1125 14:26:50.646468 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:26:51 crc kubenswrapper[4879]: I1125 14:26:51.644665 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:51 crc kubenswrapper[4879]: I1125 14:26:51.644665 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:51 crc kubenswrapper[4879]: E1125 14:26:51.645099 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:51 crc kubenswrapper[4879]: E1125 14:26:51.645002 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:51 crc kubenswrapper[4879]: I1125 14:26:51.644694 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:51 crc kubenswrapper[4879]: E1125 14:26:51.645211 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:52 crc kubenswrapper[4879]: I1125 14:26:52.644483 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:52 crc kubenswrapper[4879]: E1125 14:26:52.645060 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:53 crc kubenswrapper[4879]: I1125 14:26:53.643690 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:53 crc kubenswrapper[4879]: I1125 14:26:53.643748 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:53 crc kubenswrapper[4879]: I1125 14:26:53.643825 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:53 crc kubenswrapper[4879]: E1125 14:26:53.643920 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:53 crc kubenswrapper[4879]: E1125 14:26:53.644002 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:53 crc kubenswrapper[4879]: E1125 14:26:53.644085 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:54 crc kubenswrapper[4879]: I1125 14:26:54.644291 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:54 crc kubenswrapper[4879]: E1125 14:26:54.644491 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:55 crc kubenswrapper[4879]: I1125 14:26:55.644454 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:55 crc kubenswrapper[4879]: I1125 14:26:55.644475 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:55 crc kubenswrapper[4879]: E1125 14:26:55.644576 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:55 crc kubenswrapper[4879]: I1125 14:26:55.644650 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:55 crc kubenswrapper[4879]: E1125 14:26:55.644826 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:55 crc kubenswrapper[4879]: E1125 14:26:55.644932 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:56 crc kubenswrapper[4879]: I1125 14:26:56.644460 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:56 crc kubenswrapper[4879]: E1125 14:26:56.644701 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:57 crc kubenswrapper[4879]: I1125 14:26:57.644584 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:57 crc kubenswrapper[4879]: I1125 14:26:57.644675 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:57 crc kubenswrapper[4879]: I1125 14:26:57.644591 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:57 crc kubenswrapper[4879]: E1125 14:26:57.644802 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:57 crc kubenswrapper[4879]: E1125 14:26:57.645027 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:57 crc kubenswrapper[4879]: E1125 14:26:57.645320 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:26:58 crc kubenswrapper[4879]: I1125 14:26:58.643849 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:26:58 crc kubenswrapper[4879]: E1125 14:26:58.644752 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:26:59 crc kubenswrapper[4879]: I1125 14:26:59.644309 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:26:59 crc kubenswrapper[4879]: I1125 14:26:59.644380 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:26:59 crc kubenswrapper[4879]: I1125 14:26:59.646592 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:26:59 crc kubenswrapper[4879]: E1125 14:26:59.646568 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:26:59 crc kubenswrapper[4879]: E1125 14:26:59.646829 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:26:59 crc kubenswrapper[4879]: E1125 14:26:59.646978 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:00 crc kubenswrapper[4879]: I1125 14:27:00.644035 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:00 crc kubenswrapper[4879]: E1125 14:27:00.644167 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:01 crc kubenswrapper[4879]: I1125 14:27:01.644713 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:01 crc kubenswrapper[4879]: I1125 14:27:01.644827 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:01 crc kubenswrapper[4879]: I1125 14:27:01.644988 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:01 crc kubenswrapper[4879]: E1125 14:27:01.645574 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:01 crc kubenswrapper[4879]: E1125 14:27:01.645656 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:01 crc kubenswrapper[4879]: E1125 14:27:01.645724 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:02 crc kubenswrapper[4879]: I1125 14:27:02.643822 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:02 crc kubenswrapper[4879]: E1125 14:27:02.643945 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:03 crc kubenswrapper[4879]: I1125 14:27:03.644492 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:03 crc kubenswrapper[4879]: E1125 14:27:03.644678 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:03 crc kubenswrapper[4879]: I1125 14:27:03.645022 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:03 crc kubenswrapper[4879]: E1125 14:27:03.645274 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:03 crc kubenswrapper[4879]: I1125 14:27:03.645023 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:03 crc kubenswrapper[4879]: E1125 14:27:03.645460 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:04 crc kubenswrapper[4879]: I1125 14:27:04.643770 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:04 crc kubenswrapper[4879]: E1125 14:27:04.644003 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:04 crc kubenswrapper[4879]: I1125 14:27:04.645247 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:27:04 crc kubenswrapper[4879]: E1125 14:27:04.645521 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-g7f9p_openshift-ovn-kubernetes(5301133b-1830-45bc-a55e-7c3e97907bb9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" Nov 25 14:27:05 crc kubenswrapper[4879]: I1125 14:27:05.644433 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:05 crc kubenswrapper[4879]: I1125 14:27:05.644494 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:05 crc kubenswrapper[4879]: I1125 14:27:05.644443 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:05 crc kubenswrapper[4879]: E1125 14:27:05.644678 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:05 crc kubenswrapper[4879]: E1125 14:27:05.644823 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:05 crc kubenswrapper[4879]: E1125 14:27:05.645060 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:06 crc kubenswrapper[4879]: I1125 14:27:06.643569 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:06 crc kubenswrapper[4879]: E1125 14:27:06.643930 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.235323 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/1.log" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.236014 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/0.log" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.236083 4879 generic.go:334] "Generic (PLEG): container finished" podID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" containerID="127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255" exitCode=1 Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.236166 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerDied","Data":"127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255"} Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.236229 4879 scope.go:117] "RemoveContainer" containerID="9fcca8b665e13b2a64d452f7b68ed1de82d13651400c92b62929fea34e230ec3" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.236721 4879 scope.go:117] "RemoveContainer" containerID="127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255" Nov 25 14:27:07 crc kubenswrapper[4879]: E1125 14:27:07.236892 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8m8g8_openshift-multus(f1eafdec-4c5a-4e91-97b4-a117c35838d4)\"" pod="openshift-multus/multus-8m8g8" podUID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.644055 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:07 crc kubenswrapper[4879]: E1125 14:27:07.644340 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.644465 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:07 crc kubenswrapper[4879]: I1125 14:27:07.644589 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:07 crc kubenswrapper[4879]: E1125 14:27:07.644642 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:07 crc kubenswrapper[4879]: E1125 14:27:07.644771 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:08 crc kubenswrapper[4879]: I1125 14:27:08.243545 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/1.log" Nov 25 14:27:08 crc kubenswrapper[4879]: I1125 14:27:08.644319 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:08 crc kubenswrapper[4879]: E1125 14:27:08.644516 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:09 crc kubenswrapper[4879]: E1125 14:27:09.633168 4879 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 25 14:27:09 crc kubenswrapper[4879]: I1125 14:27:09.643730 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:09 crc kubenswrapper[4879]: I1125 14:27:09.643744 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:09 crc kubenswrapper[4879]: I1125 14:27:09.645211 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:09 crc kubenswrapper[4879]: E1125 14:27:09.645196 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:09 crc kubenswrapper[4879]: E1125 14:27:09.645328 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:09 crc kubenswrapper[4879]: E1125 14:27:09.645424 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:09 crc kubenswrapper[4879]: E1125 14:27:09.778374 4879 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 14:27:10 crc kubenswrapper[4879]: I1125 14:27:10.644522 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:10 crc kubenswrapper[4879]: E1125 14:27:10.644643 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:11 crc kubenswrapper[4879]: I1125 14:27:11.644468 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:11 crc kubenswrapper[4879]: I1125 14:27:11.644521 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:11 crc kubenswrapper[4879]: I1125 14:27:11.644573 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:11 crc kubenswrapper[4879]: E1125 14:27:11.645471 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:11 crc kubenswrapper[4879]: E1125 14:27:11.645568 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:11 crc kubenswrapper[4879]: E1125 14:27:11.645243 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:12 crc kubenswrapper[4879]: I1125 14:27:12.644483 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:12 crc kubenswrapper[4879]: E1125 14:27:12.644644 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:13 crc kubenswrapper[4879]: I1125 14:27:13.644753 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:13 crc kubenswrapper[4879]: E1125 14:27:13.645256 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:13 crc kubenswrapper[4879]: I1125 14:27:13.644823 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:13 crc kubenswrapper[4879]: E1125 14:27:13.645374 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:13 crc kubenswrapper[4879]: I1125 14:27:13.644801 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:13 crc kubenswrapper[4879]: E1125 14:27:13.645507 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:14 crc kubenswrapper[4879]: I1125 14:27:14.643845 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:14 crc kubenswrapper[4879]: E1125 14:27:14.643994 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:14 crc kubenswrapper[4879]: E1125 14:27:14.780443 4879 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" Nov 25 14:27:15 crc kubenswrapper[4879]: I1125 14:27:15.644660 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:15 crc kubenswrapper[4879]: I1125 14:27:15.644769 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:15 crc kubenswrapper[4879]: I1125 14:27:15.644666 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:15 crc kubenswrapper[4879]: E1125 14:27:15.644985 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:15 crc kubenswrapper[4879]: E1125 14:27:15.644802 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:15 crc kubenswrapper[4879]: E1125 14:27:15.645200 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:16 crc kubenswrapper[4879]: I1125 14:27:16.643951 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:16 crc kubenswrapper[4879]: E1125 14:27:16.644190 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:17 crc kubenswrapper[4879]: I1125 14:27:17.644570 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:17 crc kubenswrapper[4879]: I1125 14:27:17.644572 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:17 crc kubenswrapper[4879]: I1125 14:27:17.644589 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:17 crc kubenswrapper[4879]: E1125 14:27:17.644754 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:17 crc kubenswrapper[4879]: E1125 14:27:17.645155 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:17 crc kubenswrapper[4879]: E1125 14:27:17.645250 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:17 crc kubenswrapper[4879]: I1125 14:27:17.645424 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.283327 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/3.log" Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.285911 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerStarted","Data":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.286396 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.324560 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podStartSLOduration=106.32454427 podStartE2EDuration="1m46.32454427s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:18.323607405 +0000 UTC m=+129.927020496" watchObservedRunningTime="2025-11-25 14:27:18.32454427 +0000 UTC m=+129.927957341" Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.517686 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-48cv4"] Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.517853 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:18 crc kubenswrapper[4879]: E1125 14:27:18.517994 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:18 crc kubenswrapper[4879]: I1125 14:27:18.643630 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:18 crc kubenswrapper[4879]: E1125 14:27:18.643793 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:19 crc kubenswrapper[4879]: I1125 14:27:19.644296 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:19 crc kubenswrapper[4879]: I1125 14:27:19.644340 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:19 crc kubenswrapper[4879]: E1125 14:27:19.645474 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:19 crc kubenswrapper[4879]: I1125 14:27:19.645624 4879 scope.go:117] "RemoveContainer" containerID="127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255" Nov 25 14:27:19 crc kubenswrapper[4879]: E1125 14:27:19.645680 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:19 crc kubenswrapper[4879]: E1125 14:27:19.780912 4879 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Nov 25 14:27:20 crc kubenswrapper[4879]: I1125 14:27:20.294403 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/1.log" Nov 25 14:27:20 crc kubenswrapper[4879]: I1125 14:27:20.294471 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerStarted","Data":"04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7"} Nov 25 14:27:20 crc kubenswrapper[4879]: I1125 14:27:20.644690 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:20 crc kubenswrapper[4879]: I1125 14:27:20.644737 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:20 crc kubenswrapper[4879]: E1125 14:27:20.644908 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:20 crc kubenswrapper[4879]: E1125 14:27:20.645078 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:21 crc kubenswrapper[4879]: I1125 14:27:21.644322 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:21 crc kubenswrapper[4879]: I1125 14:27:21.644386 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:21 crc kubenswrapper[4879]: E1125 14:27:21.644468 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:21 crc kubenswrapper[4879]: E1125 14:27:21.644953 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:22 crc kubenswrapper[4879]: I1125 14:27:22.644532 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:22 crc kubenswrapper[4879]: E1125 14:27:22.644653 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:22 crc kubenswrapper[4879]: I1125 14:27:22.644549 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:22 crc kubenswrapper[4879]: E1125 14:27:22.644820 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:23 crc kubenswrapper[4879]: I1125 14:27:23.644208 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:23 crc kubenswrapper[4879]: I1125 14:27:23.644309 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:23 crc kubenswrapper[4879]: E1125 14:27:23.644361 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 14:27:23 crc kubenswrapper[4879]: E1125 14:27:23.644478 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 14:27:24 crc kubenswrapper[4879]: I1125 14:27:24.644753 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:24 crc kubenswrapper[4879]: I1125 14:27:24.644768 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:24 crc kubenswrapper[4879]: E1125 14:27:24.644964 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-48cv4" podUID="960966b7-77d2-49d8-bfcc-2aa44e032f8c" Nov 25 14:27:24 crc kubenswrapper[4879]: E1125 14:27:24.645018 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.643731 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.643872 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.645657 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.646390 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.646398 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 25 14:27:25 crc kubenswrapper[4879]: I1125 14:27:25.647909 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 25 14:27:26 crc kubenswrapper[4879]: I1125 14:27:26.643995 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:26 crc kubenswrapper[4879]: I1125 14:27:26.644044 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:26 crc kubenswrapper[4879]: I1125 14:27:26.646848 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 25 14:27:26 crc kubenswrapper[4879]: I1125 14:27:26.646877 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 25 14:27:28 crc kubenswrapper[4879]: I1125 14:27:28.283843 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.605202 4879 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.643595 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.644169 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s72fm"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.644730 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.644988 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-k8vbb"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.645315 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.648294 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.655473 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.655554 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.655747 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.656590 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.657749 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.657881 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.658244 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cdwf8"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.658354 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.659004 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.660615 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.668893 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.669328 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.669953 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.669996 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.670262 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.670379 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.670429 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.670547 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.670584 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.671097 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.671294 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.671365 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.671423 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.671881 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.672317 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.672440 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.685274 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.686448 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.686596 4879 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.687608 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.688338 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.690031 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.690383 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.690748 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-wcwhw"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.691325 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.709822 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.710157 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.710468 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.710807 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4fqjp"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.709890 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.711296 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.711671 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.711963 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.711353 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.712575 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-t8blt"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.713631 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.714460 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.714911 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.715038 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.715152 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.715328 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.715711 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.716507 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.717151 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.717665 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.717954 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.718065 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.718492 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.719274 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.723791 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.726142 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.726520 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.727214 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728153 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728326 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728532 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-node-pullsecrets\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728644 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh9ks\" (UniqueName: \"kubernetes.io/projected/a96b9df3-d517-42ff-9217-faa4350f6c9d-kube-api-access-fh9ks\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728756 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch6mw\" (UniqueName: \"kubernetes.io/projected/345c57bf-b025-4628-9e6d-475c85021591-kube-api-access-ch6mw\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728848 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-encryption-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-serving-cert\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728999 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcdpm\" (UniqueName: \"kubernetes.io/projected/94a3dcbc-eef1-4a71-82b2-5be7e1008428-kube-api-access-qcdpm\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.728932 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.729218 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-audit-policies\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.729427 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.729876 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9560d440-7afa-479c-9d1f-ca927e92afab-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730046 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/345c57bf-b025-4628-9e6d-475c85021591-audit-dir\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730149 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-auth-proxy-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730224 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/94a3dcbc-eef1-4a71-82b2-5be7e1008428-machine-approver-tls\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730408 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit-dir\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730323 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730060 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730506 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnxsv\" (UniqueName: \"kubernetes.io/projected/9560d440-7afa-479c-9d1f-ca927e92afab-kube-api-access-nnxsv\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730713 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-etcd-client\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730862 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-serving-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.730935 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-encryption-config\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731011 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731083 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-serving-cert\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731172 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 
14:27:29.731244 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-image-import-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731314 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731405 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731567 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-client\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9560d440-7afa-479c-9d1f-ca927e92afab-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.731741 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.732230 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.733483 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.736557 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.737087 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.737955 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-2wdzj"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.738011 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.738215 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.738434 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.738566 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.739989 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740078 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740343 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740443 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740497 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740518 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740629 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740751 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.740968 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.741100 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.741829 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.743014 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.748451 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 25 
14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.750668 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.748891 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.748964 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749044 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749309 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749410 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749516 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.751990 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749657 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.752248 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.752518 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s72fm"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.752726 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.753395 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.753763 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.754110 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.749973 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.750185 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.761774 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762000 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 25 14:27:29 crc 
kubenswrapper[4879]: I1125 14:27:29.762150 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762265 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762383 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762541 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762676 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762812 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.762882 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.763028 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.763211 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.763250 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.763390 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.763862 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.778430 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.778911 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.779782 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.780437 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.780646 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.780836 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.781657 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.782052 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.782244 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.782381 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.783193 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.783589 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.783677 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.783737 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.783839 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.784166 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.785112 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.785347 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.787670 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.788262 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.788578 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.788810 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.789716 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.792041 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.792508 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.792684 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.793164 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.793407 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.793500 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.793840 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.793858 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.794157 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gvqsd"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.794623 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.795436 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.796342 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.798263 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.798963 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hkvrt"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.799561 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.799810 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.800096 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.800736 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.805026 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.806627 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.807288 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.807695 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.808482 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.809785 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.814309 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.824832 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.827510 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.828533 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.828864 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.829420 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.829669 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.830328 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.830466 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.831694 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.831718 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wvmgv"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.832430 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.837671 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-key\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.837707 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.837717 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af3665e8-e0e8-4a8b-ad9c-d38f57326203-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.837950 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/94a3dcbc-eef1-4a71-82b2-5be7e1008428-machine-approver-tls\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.837980 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit-dir\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnxsv\" (UniqueName: \"kubernetes.io/projected/9560d440-7afa-479c-9d1f-ca927e92afab-kube-api-access-nnxsv\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838056 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838092 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838115 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-service-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838153 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw5pq\" (UniqueName: \"kubernetes.io/projected/796c6e04-6bb0-4119-8433-4c050955799d-kube-api-access-sw5pq\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838177 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838202 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838225 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-client\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838248 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838272 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prmb4\" (UniqueName: \"kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838293 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzm46\" (UniqueName: 
\"kubernetes.io/projected/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-kube-api-access-vzm46\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838313 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7adf9b2-6679-4186-a78f-03d673b858df-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838331 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-auth-proxy-config\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838346 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsmnw\" (UniqueName: \"kubernetes.io/projected/af3665e8-e0e8-4a8b-ad9c-d38f57326203-kube-api-access-xsmnw\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838365 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-etcd-client\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838397 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-serving-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838414 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk8m7\" (UniqueName: \"kubernetes.io/projected/5d67a050-d127-49f2-a854-b8c6ffbd0f62-kube-api-access-wk8m7\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.838483 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.839527 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"] Nov 25 14:27:29 crc 
kubenswrapper[4879]: I1125 14:27:29.839878 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-serving-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.839956 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit-dir\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.840050 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8659d\" (UniqueName: \"kubernetes.io/projected/65da0977-5b25-4eee-b7f4-c7e746bba180-kube-api-access-8659d\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.840075 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-encryption-config\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.840498 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.840093 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.841904 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.841936 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.841951 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-serving-cert\") pod \"apiserver-76f77b778f-k8vbb\" (UID: 
\"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842046 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpt6j\" (UniqueName: \"kubernetes.io/projected/83ced81b-e3fc-4a24-a212-4a3704e4a425-kube-api-access-jpt6j\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842070 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvv8l\" (UniqueName: \"kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842091 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff0875c0-a709-470e-9d3b-8ce2d527cc37-serving-cert\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842111 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-service-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842173 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842191 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842206 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-image-import-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 
25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842222 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842239 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-trusted-ca\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842258 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842369 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842388 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm446\" (UniqueName: \"kubernetes.io/projected/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-kube-api-access-xm446\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842405 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a460467-6ab5-46a6-81c0-3647f0761451-serving-cert\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842423 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842442 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7adf9b2-6679-4186-a78f-03d673b858df-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842460 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj7jz\" (UniqueName: \"kubernetes.io/projected/fe845b46-f773-40be-aa00-01b29bb5fa56-kube-api-access-vj7jz\") pod \"migrator-59844c95c7-qd5t9\" (UID: \"fe845b46-f773-40be-aa00-01b29bb5fa56\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842482 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-client\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842499 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9560d440-7afa-479c-9d1f-ca927e92afab-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842516 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfrmj\" (UniqueName: \"kubernetes.io/projected/ff0875c0-a709-470e-9d3b-8ce2d527cc37-kube-api-access-lfrmj\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842543 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842559 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842574 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842590 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-config\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842604 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/796c6e04-6bb0-4119-8433-4c050955799d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842625 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-images\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgrk7\" (UniqueName: \"kubernetes.io/projected/dfeab3d6-0236-4fde-8615-3b942e381dec-kube-api-access-xgrk7\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842674 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842689 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842706 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pwfc\" (UniqueName: \"kubernetes.io/projected/048918ad-7966-4f28-8396-54b365083b35-kube-api-access-7pwfc\") pod \"downloads-7954f5f757-wcwhw\" (UID: \"048918ad-7966-4f28-8396-54b365083b35\") " pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.842721 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d67a050-d127-49f2-a854-b8c6ffbd0f62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843148 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843559 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843606 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843632 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843651 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843681 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-node-pullsecrets\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843697 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843714 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-config\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843720 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-audit\") pod 
\"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843732 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843753 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843771 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnqfz\" (UniqueName: \"kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843792 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-config\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843814 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcl9w\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-kube-api-access-lcl9w\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843839 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh9ks\" (UniqueName: \"kubernetes.io/projected/a96b9df3-d517-42ff-9217-faa4350f6c9d-kube-api-access-fh9ks\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843865 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843882 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-cabundle\") pod \"service-ca-9c57cc56f-gvqsd\" 
(UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843902 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch6mw\" (UniqueName: \"kubernetes.io/projected/345c57bf-b025-4628-9e6d-475c85021591-kube-api-access-ch6mw\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phqxx\" (UniqueName: \"kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843951 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843970 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.843998 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-encryption-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844014 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dfeab3d6-0236-4fde-8615-3b942e381dec-proxy-tls\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844033 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-serving-cert\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844049 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/27a360a4-5ba8-4e1c-ae54-719572fd57d0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844064 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-serving-cert\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844081 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-config\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844098 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844117 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7adf9b2-6679-4186-a78f-03d673b858df-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844569 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cdwf8"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844682 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-image-import-ca\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.844973 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a96b9df3-d517-42ff-9217-faa4350f6c9d-node-pullsecrets\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845491 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845522 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " 
pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845529 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-k8vbb"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845565 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcdpm\" (UniqueName: \"kubernetes.io/projected/94a3dcbc-eef1-4a71-82b2-5be7e1008428-kube-api-access-qcdpm\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845615 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlkgt\" (UniqueName: \"kubernetes.io/projected/37ee675a-61f3-4b5f-bff5-e6714462238d-kube-api-access-wlkgt\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845663 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff0875c0-a709-470e-9d3b-8ce2d527cc37-available-featuregates\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845686 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-images\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845717 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-audit-policies\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845750 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845767 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-metrics-tls\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/27a360a4-5ba8-4e1c-ae54-719572fd57d0-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845809 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845835 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9560d440-7afa-479c-9d1f-ca927e92afab-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845851 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37ee675a-61f3-4b5f-bff5-e6714462238d-serving-cert\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.845868 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxzbc\" (UniqueName: \"kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846250 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/345c57bf-b025-4628-9e6d-475c85021591-audit-policies\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846326 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/345c57bf-b025-4628-9e6d-475c85021591-audit-dir\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846347 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a96b9df3-d517-42ff-9217-faa4350f6c9d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846362 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " 
pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846383 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846404 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9560d440-7afa-479c-9d1f-ca927e92afab-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846404 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/345c57bf-b025-4628-9e6d-475c85021591-audit-dir\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846468 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-auth-proxy-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846505 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846523 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blxmn\" (UniqueName: \"kubernetes.io/projected/0a460467-6ab5-46a6-81c0-3647f0761451-kube-api-access-blxmn\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846544 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846883 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b"] Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.846898 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 25 14:27:29 crc 
kubenswrapper[4879]: I1125 14:27:29.846963 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/94a3dcbc-eef1-4a71-82b2-5be7e1008428-auth-proxy-config\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.848078 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.849185 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-serving-cert\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.849255 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.850057 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-etcd-client\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.850259 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.850618 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/94a3dcbc-eef1-4a71-82b2-5be7e1008428-machine-approver-tls\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.851273 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.851419 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-encryption-config\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.852246 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.853227 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.854276 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-t8blt"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.855679 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9560d440-7afa-479c-9d1f-ca927e92afab-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.855730 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.857455 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-etcd-client\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.858003 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.864297 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.865048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c57bf-b025-4628-9e6d-475c85021591-serving-cert\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.865499 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.867450 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wcwhw"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.867575 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.870263 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a96b9df3-d517-42ff-9217-faa4350f6c9d-encryption-config\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.870599 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.873181 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-m5wkj"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.874060 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-m5wkj"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.875957 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-j2ht8"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.876399 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j2ht8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.878218 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.882817 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.886716 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.886923 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.888088 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.889513 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.891342 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.893517 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.895642 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4fqjp"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.897538 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.899397 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gvqsd"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.901415 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.903018 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.904569 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.906303 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.907369 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.908206 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j2ht8"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.910394 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.912206 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hkvrt"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.914389 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wvmgv"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.915865 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7mq24"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.916594 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7mq24"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.917605 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7mq24"]
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.927090 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947388 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947530 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947563 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-trusted-ca\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947587 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7adf9b2-6679-4186-a78f-03d673b858df-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947611 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj7jz\" (UniqueName: \"kubernetes.io/projected/fe845b46-f773-40be-aa00-01b29bb5fa56-kube-api-access-vj7jz\") pod \"migrator-59844c95c7-qd5t9\" (UID: \"fe845b46-f773-40be-aa00-01b29bb5fa56\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947634 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm446\" (UniqueName: \"kubernetes.io/projected/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-kube-api-access-xm446\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947655 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a460467-6ab5-46a6-81c0-3647f0761451-serving-cert\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947674 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947708 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfrmj\" (UniqueName: \"kubernetes.io/projected/ff0875c0-a709-470e-9d3b-8ce2d527cc37-kube-api-access-lfrmj\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947728 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-config\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947746 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/796c6e04-6bb0-4119-8433-4c050955799d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947772 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947791 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947812 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947833 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-images\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947874 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgrk7\" (UniqueName: \"kubernetes.io/projected/dfeab3d6-0236-4fde-8615-3b942e381dec-kube-api-access-xgrk7\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947897 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947917 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947937 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947960 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pwfc\" (UniqueName: \"kubernetes.io/projected/048918ad-7966-4f28-8396-54b365083b35-kube-api-access-7pwfc\") pod \"downloads-7954f5f757-wcwhw\" (UID: \"048918ad-7966-4f28-8396-54b365083b35\") " pod="openshift-console/downloads-7954f5f757-wcwhw"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.947982 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d67a050-d127-49f2-a854-b8c6ffbd0f62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948012 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948035 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948056 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948073 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-config\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948093 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948113 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948151 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnqfz\" (UniqueName: \"kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948171 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcl9w\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-kube-api-access-lcl9w\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948190 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-config\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948216 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948235 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-cabundle\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948253 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phqxx\" (UniqueName: \"kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948271 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948308 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948330 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dfeab3d6-0236-4fde-8615-3b942e381dec-proxy-tls\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948399 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-config\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948440 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948466 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/27a360a4-5ba8-4e1c-ae54-719572fd57d0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948487 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-serving-cert\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948510 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7adf9b2-6679-4186-a78f-03d673b858df-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948536 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlkgt\" (UniqueName: \"kubernetes.io/projected/37ee675a-61f3-4b5f-bff5-e6714462238d-kube-api-access-wlkgt\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948569 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff0875c0-a709-470e-9d3b-8ce2d527cc37-available-featuregates\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948589 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-images\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948611 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/27a360a4-5ba8-4e1c-ae54-719572fd57d0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948633 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948655 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.948680 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-trusted-ca\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.949787 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-config\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950191 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-metrics-tls\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950269 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxzbc\" (UniqueName: \"kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950309 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37ee675a-61f3-4b5f-bff5-e6714462238d-serving-cert\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950355 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950382 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950504 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950534 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950545 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blxmn\" (UniqueName: \"kubernetes.io/projected/0a460467-6ab5-46a6-81c0-3647f0761451-kube-api-access-blxmn\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950602 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-key\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950666 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af3665e8-e0e8-4a8b-ad9c-d38f57326203-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950692 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950810 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950831 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-service-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950939 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw5pq\" (UniqueName: \"kubernetes.io/projected/796c6e04-6bb0-4119-8433-4c050955799d-kube-api-access-sw5pq\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.950961 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hzzp\" (UniqueName: \"kubernetes.io/projected/f15a140e-3723-4b2d-8f93-0c261e02265d-kube-api-access-6hzzp\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951010 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-client\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951074 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-auth-proxy-config\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951098 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsmnw\" (UniqueName: \"kubernetes.io/projected/af3665e8-e0e8-4a8b-ad9c-d38f57326203-kube-api-access-xsmnw\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951137 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prmb4\" (UniqueName: \"kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951159 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzm46\" (UniqueName: \"kubernetes.io/projected/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-kube-api-access-vzm46\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951157 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951181 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7adf9b2-6679-4186-a78f-03d673b858df-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951176 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951249 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951312 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk8m7\" (UniqueName: \"kubernetes.io/projected/5d67a050-d127-49f2-a854-b8c6ffbd0f62-kube-api-access-wk8m7\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951354 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951390 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951439 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951476 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8659d\" (UniqueName: \"kubernetes.io/projected/65da0977-5b25-4eee-b7f4-c7e746bba180-kube-api-access-8659d\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951512 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpt6j\" (UniqueName: \"kubernetes.io/projected/83ced81b-e3fc-4a24-a212-4a3704e4a425-kube-api-access-jpt6j\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951553 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvv8l\" (UniqueName: \"kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951592 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951637 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951642 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951675 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff0875c0-a709-470e-9d3b-8ce2d527cc37-serving-cert\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951720 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-service-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951765 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.951951 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.952184 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37ee675a-61f3-4b5f-bff5-e6714462238d-config\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.954163 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff0875c0-a709-470e-9d3b-8ce2d527cc37-available-featuregates\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.954337 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-config\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.955706 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.956085 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-service-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.956086 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.956188 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/37ee675a-61f3-4b5f-bff5-e6714462238d-serving-cert\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.956671 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.956830 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.958382 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.958882 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-service-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.959266 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-client\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.959564 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.959862 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.960255 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/27a360a4-5ba8-4e1c-ae54-719572fd57d0-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.960666 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-auth-proxy-config\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.960675 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.960821 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.961436 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-config\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.962212 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.962686 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/796c6e04-6bb0-4119-8433-4c050955799d-images\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.962741 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.962793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.963213 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/83ced81b-e3fc-4a24-a212-4a3704e4a425-serving-cert\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.963273 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-metrics-tls\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.963926 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.964051 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff0875c0-a709-470e-9d3b-8ce2d527cc37-serving-cert\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.964472 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/796c6e04-6bb0-4119-8433-4c050955799d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.964622 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.964735 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.965690 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.966039 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.966390 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.967375 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a460467-6ab5-46a6-81c0-3647f0761451-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.967856 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/83ced81b-e3fc-4a24-a212-4a3704e4a425-etcd-ca\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.967859 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.967995 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.968862 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/af3665e8-e0e8-4a8b-ad9c-d38f57326203-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.970018 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/27a360a4-5ba8-4e1c-ae54-719572fd57d0-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.970197 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a460467-6ab5-46a6-81c0-3647f0761451-serving-cert\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.971325 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.971835 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69"
Nov 25 14:27:29 crc kubenswrapper[4879]: I1125 14:27:29.989417 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.007259 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.027595 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.047678 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.053169 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hzzp\" (UniqueName: \"kubernetes.io/projected/f15a140e-3723-4b2d-8f93-0c261e02265d-kube-api-access-6hzzp\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.053236 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.053286 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.067211 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.087237 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.107322 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.126782 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.134292 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c7adf9b2-6679-4186-a78f-03d673b858df-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.147091 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.155600 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7adf9b2-6679-4186-a78f-03d673b858df-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.188790 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.207371 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.227444 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.247373 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.267539 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.287791 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.306785 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.327299 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.355089 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.367375 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.387344 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.406987 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.427119 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Nov 25
14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.447682 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.467009 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.487497 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.507731 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.528051 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.547442 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.568098 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.587713 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.607326 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.618160 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dfeab3d6-0236-4fde-8615-3b942e381dec-images\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.627296 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.647089 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.667473 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.687275 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.707198 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.720161 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/dfeab3d6-0236-4fde-8615-3b942e381dec-proxy-tls\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.726932 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.746904 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.767811 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.775638 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.788689 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.805753 4879 request.go:700] Waited for 1.011590849s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.807372 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.828012 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.834562 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d67a050-d127-49f2-a854-b8c6ffbd0f62-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.847341 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.861422 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-key\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.867395 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.889045 4879 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.908482 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.915825 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/65da0977-5b25-4eee-b7f4-c7e746bba180-signing-cabundle\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.928797 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.948592 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 25 14:27:30 crc kubenswrapper[4879]: E1125 14:27:30.955287 4879 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Nov 25 14:27:30 crc kubenswrapper[4879]: E1125 14:27:30.955383 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume podName:92593502-485f-47ee-aba3-392741b2740a nodeName:}" failed. No retries permitted until 2025-11-25 14:27:31.455357578 +0000 UTC m=+143.058770649 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume") pod "collect-profiles-29401335-qf2x4" (UID: "92593502-485f-47ee-aba3-392741b2740a") : failed to sync configmap cache: timed out waiting for the condition Nov 25 14:27:30 crc kubenswrapper[4879]: E1125 14:27:30.967434 4879 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Nov 25 14:27:30 crc kubenswrapper[4879]: E1125 14:27:30.967487 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls podName:af3665e8-e0e8-4a8b-ad9c-d38f57326203 nodeName:}" failed. No retries permitted until 2025-11-25 14:27:31.467472037 +0000 UTC m=+143.070885108 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls") pod "machine-config-controller-84d6567774-9rv62" (UID: "af3665e8-e0e8-4a8b-ad9c-d38f57326203") : failed to sync secret cache: timed out waiting for the condition Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.967505 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 25 14:27:30 crc kubenswrapper[4879]: I1125 14:27:30.988093 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.007702 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.026813 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.047626 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 25 14:27:31 crc kubenswrapper[4879]: E1125 14:27:31.053529 4879 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Nov 25 14:27:31 crc kubenswrapper[4879]: E1125 14:27:31.053602 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config podName:f15a140e-3723-4b2d-8f93-0c261e02265d nodeName:}" failed. No retries permitted until 2025-11-25 14:27:31.553580499 +0000 UTC m=+143.156993580 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config") pod "service-ca-operator-777779d784-rdfg6" (UID: "f15a140e-3723-4b2d-8f93-0c261e02265d") : failed to sync configmap cache: timed out waiting for the condition Nov 25 14:27:31 crc kubenswrapper[4879]: E1125 14:27:31.053817 4879 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Nov 25 14:27:31 crc kubenswrapper[4879]: E1125 14:27:31.053891 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert podName:f15a140e-3723-4b2d-8f93-0c261e02265d nodeName:}" failed. No retries permitted until 2025-11-25 14:27:31.553880167 +0000 UTC m=+143.157293248 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert") pod "service-ca-operator-777779d784-rdfg6" (UID: "f15a140e-3723-4b2d-8f93-0c261e02265d") : failed to sync secret cache: timed out waiting for the condition Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.067812 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.090221 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.107898 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.128096 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.147419 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.167991 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.187774 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.207869 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.229067 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.248381 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.267888 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.287969 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.311403 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.328413 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.356002 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.368593 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.388109 4879 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.408608 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.448249 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.467753 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.473577 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.473792 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.474499 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.476808 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/af3665e8-e0e8-4a8b-ad9c-d38f57326203-proxy-tls\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.488921 4879 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.521052 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnxsv\" (UniqueName: \"kubernetes.io/projected/9560d440-7afa-479c-9d1f-ca927e92afab-kube-api-access-nnxsv\") pod \"openshift-apiserver-operator-796bbdcf4f-dmxk6\" (UID: \"9560d440-7afa-479c-9d1f-ca927e92afab\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.548603 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch6mw\" (UniqueName: \"kubernetes.io/projected/345c57bf-b025-4628-9e6d-475c85021591-kube-api-access-ch6mw\") pod \"apiserver-7bbb656c7d-mwvsp\" (UID: \"345c57bf-b025-4628-9e6d-475c85021591\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.564211 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh9ks\" (UniqueName: 
\"kubernetes.io/projected/a96b9df3-d517-42ff-9217-faa4350f6c9d-kube-api-access-fh9ks\") pod \"apiserver-76f77b778f-k8vbb\" (UID: \"a96b9df3-d517-42ff-9217-faa4350f6c9d\") " pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.575361 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.575438 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.576198 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f15a140e-3723-4b2d-8f93-0c261e02265d-config\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.578806 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f15a140e-3723-4b2d-8f93-0c261e02265d-serving-cert\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.582072 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcdpm\" (UniqueName: \"kubernetes.io/projected/94a3dcbc-eef1-4a71-82b2-5be7e1008428-kube-api-access-qcdpm\") pod \"machine-approver-56656f9798-m88zc\" (UID: \"94a3dcbc-eef1-4a71-82b2-5be7e1008428\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.587299 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.587369 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.607511 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.623169 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.627776 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 25 14:27:31 crc kubenswrapper[4879]: W1125 14:27:31.636241 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94a3dcbc_eef1_4a71_82b2_5be7e1008428.slice/crio-66132d36510105037b9fbc646605d1a82e0c573f7fcc9efd346cb80e631a5b8e WatchSource:0}: Error finding container 66132d36510105037b9fbc646605d1a82e0c573f7fcc9efd346cb80e631a5b8e: Status 404 returned error can't find the container with id 66132d36510105037b9fbc646605d1a82e0c573f7fcc9efd346cb80e631a5b8e Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.646910 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.668045 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.687918 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.708049 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.724389 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.727332 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.747582 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.768088 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.803327 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6"] Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.803795 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnqfz\" (UniqueName: \"kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz\") pod \"oauth-openshift-558db77b4-v9t69\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.806600 4879 request.go:700] Waited for 1.854668675s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa/token Nov 25 14:27:31 crc kubenswrapper[4879]: W1125 14:27:31.813935 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9560d440_7afa_479c_9d1f_ca927e92afab.slice/crio-2a60bb0f1801e57617155b9060d8d977fa5be6da87eb36f15fa9eea79c7368c8 WatchSource:0}: Error finding container 
2a60bb0f1801e57617155b9060d8d977fa5be6da87eb36f15fa9eea79c7368c8: Status 404 returned error can't find the container with id 2a60bb0f1801e57617155b9060d8d977fa5be6da87eb36f15fa9eea79c7368c8 Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.816708 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.828554 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxzbc\" (UniqueName: \"kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc\") pod \"route-controller-manager-6576b87f9c-2498d\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.850305 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk8m7\" (UniqueName: \"kubernetes.io/projected/5d67a050-d127-49f2-a854-b8c6ffbd0f62-kube-api-access-wk8m7\") pod \"package-server-manager-789f6589d5-lw5bt\" (UID: \"5d67a050-d127-49f2-a854-b8c6ffbd0f62\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.858503 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.870752 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvv8l\" (UniqueName: \"kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l\") pod \"collect-profiles-29401335-qf2x4\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.885563 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8659d\" (UniqueName: \"kubernetes.io/projected/65da0977-5b25-4eee-b7f4-c7e746bba180-kube-api-access-8659d\") pod \"service-ca-9c57cc56f-gvqsd\" (UID: \"65da0977-5b25-4eee-b7f4-c7e746bba180\") " pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.895546 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.904754 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpt6j\" (UniqueName: \"kubernetes.io/projected/83ced81b-e3fc-4a24-a212-4a3704e4a425-kube-api-access-jpt6j\") pod \"etcd-operator-b45778765-4fqjp\" (UID: \"83ced81b-e3fc-4a24-a212-4a3704e4a425\") " pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.931326 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.935708 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcl9w\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-kube-api-access-lcl9w\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.944682 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c7adf9b2-6679-4186-a78f-03d673b858df-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qmsbs\" (UID: \"c7adf9b2-6679-4186-a78f-03d673b858df\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.949779 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.965438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlkgt\" (UniqueName: \"kubernetes.io/projected/37ee675a-61f3-4b5f-bff5-e6714462238d-kube-api-access-wlkgt\") pod \"console-operator-58897d9998-t8blt\" (UID: \"37ee675a-61f3-4b5f-bff5-e6714462238d\") " pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.979168 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp"] Nov 25 14:27:31 crc kubenswrapper[4879]: I1125 14:27:31.984043 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phqxx\" (UniqueName: \"kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx\") pod \"controller-manager-879f6c89f-gvljt\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.003345 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw5pq\" (UniqueName: \"kubernetes.io/projected/796c6e04-6bb0-4119-8433-4c050955799d-kube-api-access-sw5pq\") pod \"machine-api-operator-5694c8668f-s72fm\" (UID: \"796c6e04-6bb0-4119-8433-4c050955799d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.009434 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-k8vbb"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.010942 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:32 crc kubenswrapper[4879]: W1125 14:27:32.015393 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod345c57bf_b025_4628_9e6d_475c85021591.slice/crio-6c0485918f4bb63bd6356653d11177272931c3f7ac783f3655b66dca6d026dad WatchSource:0}: Error finding container 6c0485918f4bb63bd6356653d11177272931c3f7ac783f3655b66dca6d026dad: Status 404 returned error can't find the container with id 6c0485918f4bb63bd6356653d11177272931c3f7ac783f3655b66dca6d026dad Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.016650 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:32 crc kubenswrapper[4879]: W1125 14:27:32.017629 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda96b9df3_d517_42ff_9217_faa4350f6c9d.slice/crio-0f5a1a13990d2ed5ab80924b3c25a3d87a4a3f9cd83d54667a771b99509a3ad2 WatchSource:0}: Error finding container 0f5a1a13990d2ed5ab80924b3c25a3d87a4a3f9cd83d54667a771b99509a3ad2: Status 404 returned error can't find the container with id 0f5a1a13990d2ed5ab80924b3c25a3d87a4a3f9cd83d54667a771b99509a3ad2 Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.023786 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgrk7\" (UniqueName: \"kubernetes.io/projected/dfeab3d6-0236-4fde-8615-3b942e381dec-kube-api-access-xgrk7\") pod \"machine-config-operator-74547568cd-s598b\" (UID: \"dfeab3d6-0236-4fde-8615-3b942e381dec\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.047518 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.048386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsmnw\" (UniqueName: \"kubernetes.io/projected/af3665e8-e0e8-4a8b-ad9c-d38f57326203-kube-api-access-xsmnw\") pod \"machine-config-controller-84d6567774-9rv62\" (UID: \"af3665e8-e0e8-4a8b-ad9c-d38f57326203\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.063466 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.071910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prmb4\" (UniqueName: \"kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4\") pod \"console-f9d7485db-r4d2h\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.084884 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.088649 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzm46\" (UniqueName: \"kubernetes.io/projected/2aa7a228-3e61-4072-ae6d-5ebf0fa16264-kube-api-access-vzm46\") pod \"dns-operator-744455d44c-cdwf8\" (UID: \"2aa7a228-3e61-4072-ae6d-5ebf0fa16264\") " pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.101290 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.109268 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.109661 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfrmj\" (UniqueName: \"kubernetes.io/projected/ff0875c0-a709-470e-9d3b-8ce2d527cc37-kube-api-access-lfrmj\") pod \"openshift-config-operator-7777fb866f-26pf5\" (UID: \"ff0875c0-a709-470e-9d3b-8ce2d527cc37\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.129928 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj7jz\" (UniqueName: \"kubernetes.io/projected/fe845b46-f773-40be-aa00-01b29bb5fa56-kube-api-access-vj7jz\") pod \"migrator-59844c95c7-qd5t9\" (UID: \"fe845b46-f773-40be-aa00-01b29bb5fa56\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.131556 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.138751 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.144824 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.147640 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm446\" (UniqueName: \"kubernetes.io/projected/a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe-kube-api-access-xm446\") pod \"cluster-samples-operator-665b6dd947-sfrm8\" (UID: \"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.162938 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.167112 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pwfc\" (UniqueName: \"kubernetes.io/projected/048918ad-7966-4f28-8396-54b365083b35-kube-api-access-7pwfc\") pod \"downloads-7954f5f757-wcwhw\" (UID: \"048918ad-7966-4f28-8396-54b365083b35\") " pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.179941 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.181004 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27a360a4-5ba8-4e1c-ae54-719572fd57d0-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-s2pwm\" (UID: \"27a360a4-5ba8-4e1c-ae54-719572fd57d0\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.205876 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blxmn\" (UniqueName: \"kubernetes.io/projected/0a460467-6ab5-46a6-81c0-3647f0761451-kube-api-access-blxmn\") pod \"authentication-operator-69f744f599-4xk5z\" (UID: \"0a460467-6ab5-46a6-81c0-3647f0761451\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.225294 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hzzp\" (UniqueName: \"kubernetes.io/projected/f15a140e-3723-4b2d-8f93-0c261e02265d-kube-api-access-6hzzp\") pod \"service-ca-operator-777779d784-rdfg6\" (UID: \"f15a140e-3723-4b2d-8f93-0c261e02265d\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.290395 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295536 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w64gv\" (UniqueName: \"kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295580 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5czk\" (UniqueName: \"kubernetes.io/projected/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-kube-api-access-m5czk\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295605 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr2hk\" (UniqueName: \"kubernetes.io/projected/c82b6b23-4886-4ebd-a336-8dcef2d754fe-kube-api-access-xr2hk\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295623 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295639 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-config\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295668 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295683 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-srv-cert\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295708 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqfh5\" (UniqueName: \"kubernetes.io/projected/480e820a-fab4-451e-9aaa-97137963b98c-kube-api-access-zqfh5\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295744 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295758 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-profile-collector-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295774 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e7d5272-a80d-42b6-9596-66894f68735d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295806 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295830 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-webhook-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295844 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-metrics-certs\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295860 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-stats-auth\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295889 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295905 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.295967 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e7d5272-a80d-42b6-9596-66894f68735d-config\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.296001 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.296015 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-profile-collector-cert\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.296031 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.296045 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94jmt\" (UniqueName: \"kubernetes.io/projected/ac840539-c3f6-424e-8203-6f2572a87b71-kube-api-access-94jmt\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.296091 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.297705 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.297916 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/90d1be85-7861-4c27-9cd1-3acf3b0721b4-tmpfs\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.298015 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:32.798002713 +0000 UTC m=+144.401415784 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298064 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298103 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d327afb3-9eeb-4835-a538-27402e1f5366-metrics-tls\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkfdj\" (UniqueName: \"kubernetes.io/projected/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-kube-api-access-kkfdj\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298315 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-default-certificate\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298370 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298405 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cr9m\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298426 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shp8l\" (UniqueName: \"kubernetes.io/projected/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-kube-api-access-shp8l\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: 
\"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298447 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac840539-c3f6-424e-8203-6f2572a87b71-service-ca-bundle\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b57gg\" (UniqueName: \"kubernetes.io/projected/90d1be85-7861-4c27-9cd1-3acf3b0721b4-kube-api-access-b57gg\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298495 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298510 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298530 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-srv-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298549 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d327afb3-9eeb-4835-a538-27402e1f5366-trusted-ca\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298580 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298665 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298686 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxw5d\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-kube-api-access-mxw5d\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298701 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2md4f\" (UniqueName: \"kubernetes.io/projected/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-kube-api-access-2md4f\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298734 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.298930 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e7d5272-a80d-42b6-9596-66894f68735d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.299038 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-apiservice-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.304250 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.331387 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.338396 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.351924 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" event={"ID":"9560d440-7afa-479c-9d1f-ca927e92afab","Type":"ContainerStarted","Data":"f4d4e5539bd11002ee05164229b7086ef5b9967100e9b95f2fece790413b3b32"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.351975 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" event={"ID":"9560d440-7afa-479c-9d1f-ca927e92afab","Type":"ContainerStarted","Data":"2a60bb0f1801e57617155b9060d8d977fa5be6da87eb36f15fa9eea79c7368c8"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.360060 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" event={"ID":"92593502-485f-47ee-aba3-392741b2740a","Type":"ContainerStarted","Data":"bc92d2bfa39a77b00df60acd7150a58477269655d7f3f94521b9b7f70401e722"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.364812 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" event={"ID":"5d67a050-d127-49f2-a854-b8c6ffbd0f62","Type":"ContainerStarted","Data":"f2a56c2ac5dd30298f6b5592b756ebe111b3f953353e3af7d87a6810e4abdb2b"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.367066 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" event={"ID":"a96b9df3-d517-42ff-9217-faa4350f6c9d","Type":"ContainerStarted","Data":"0f5a1a13990d2ed5ab80924b3c25a3d87a4a3f9cd83d54667a771b99509a3ad2"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.368345 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" event={"ID":"345c57bf-b025-4628-9e6d-475c85021591","Type":"ContainerStarted","Data":"6c0485918f4bb63bd6356653d11177272931c3f7ac783f3655b66dca6d026dad"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.369740 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" event={"ID":"94a3dcbc-eef1-4a71-82b2-5be7e1008428","Type":"ContainerStarted","Data":"62ce95a0287e23466e19f82416b0166811d64b3caefa007bd8838208a9e1d483"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.369765 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" event={"ID":"94a3dcbc-eef1-4a71-82b2-5be7e1008428","Type":"ContainerStarted","Data":"66132d36510105037b9fbc646605d1a82e0c573f7fcc9efd346cb80e631a5b8e"} Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400058 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400373 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b57gg\" (UniqueName: \"kubernetes.io/projected/90d1be85-7861-4c27-9cd1-3acf3b0721b4-kube-api-access-b57gg\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: 
\"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.400395 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:32.900364587 +0000 UTC m=+144.503777668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac840539-c3f6-424e-8203-6f2572a87b71-service-ca-bundle\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400513 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400544 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-plugins-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400587 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400611 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-certs\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-srv-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400665 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d327afb3-9eeb-4835-a538-27402e1f5366-trusted-ca\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400686 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400727 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-socket-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400757 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400791 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxw5d\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-kube-api-access-mxw5d\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400822 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2md4f\" (UniqueName: \"kubernetes.io/projected/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-kube-api-access-2md4f\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.400848 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401036 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e7d5272-a80d-42b6-9596-66894f68735d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401072 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9phqs\" (UniqueName: 
\"kubernetes.io/projected/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-kube-api-access-9phqs\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401102 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-mountpoint-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401196 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-registration-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401225 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-csi-data-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401428 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-apiservice-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401474 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac840539-c3f6-424e-8203-6f2572a87b71-service-ca-bundle\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401480 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w64gv\" (UniqueName: \"kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401543 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5czk\" (UniqueName: \"kubernetes.io/projected/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-kube-api-access-m5czk\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401582 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr2hk\" (UniqueName: \"kubernetes.io/projected/c82b6b23-4886-4ebd-a336-8dcef2d754fe-kube-api-access-xr2hk\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401602 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401620 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401643 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-config\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401665 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-srv-cert\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401703 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401761 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqfh5\" (UniqueName: \"kubernetes.io/projected/480e820a-fab4-451e-9aaa-97137963b98c-kube-api-access-zqfh5\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401780 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffc8b006-a506-4534-a702-96aeb99f3505-cert\") pod \"ingress-canary-j2ht8\" (UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401798 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-config-volume\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401839 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.401879 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-profile-collector-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e7d5272-a80d-42b6-9596-66894f68735d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403229 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403290 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-webhook-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403318 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-metrics-certs\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403345 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-stats-auth\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403431 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.403467 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp4jt\" (UniqueName: \"kubernetes.io/projected/ad873970-06ce-4687-ac85-d245367b4d76-kube-api-access-sp4jt\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.404010 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.404106 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d327afb3-9eeb-4835-a538-27402e1f5366-trusted-ca\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.405437 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-config\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.406766 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.406879 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9ckb\" (UniqueName: \"kubernetes.io/projected/e4a6faab-f982-47f5-a7e3-c26e4e65800f-kube-api-access-h9ckb\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.406987 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7hpz\" (UniqueName: \"kubernetes.io/projected/ffc8b006-a506-4534-a702-96aeb99f3505-kube-api-access-m7hpz\") pod \"ingress-canary-j2ht8\" (UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.407559 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.408238 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-srv-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.411787 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.412113 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.412221 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.414761 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e7d5272-a80d-42b6-9596-66894f68735d-config\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.415446 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e7d5272-a80d-42b6-9596-66894f68735d-config\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.415827 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.415885 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-profile-collector-cert\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc 
kubenswrapper[4879]: I1125 14:27:32.416148 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.416252 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94jmt\" (UniqueName: \"kubernetes.io/projected/ac840539-c3f6-424e-8203-6f2572a87b71-kube-api-access-94jmt\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.416306 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-node-bootstrap-token\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.416498 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.417789 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.418197 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:32.918181342 +0000 UTC m=+144.521594473 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.418575 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/90d1be85-7861-4c27-9cd1-3acf3b0721b4-tmpfs\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.420261 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.420400 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d327afb3-9eeb-4835-a538-27402e1f5366-metrics-tls\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.420500 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-metrics-tls\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.421270 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.421431 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/90d1be85-7861-4c27-9cd1-3acf3b0721b4-tmpfs\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.421740 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkfdj\" (UniqueName: \"kubernetes.io/projected/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-kube-api-access-kkfdj\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.421876 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" 
(UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-default-certificate\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.422043 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.426237 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c82b6b23-4886-4ebd-a336-8dcef2d754fe-profile-collector-cert\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.426420 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cr9m\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.426453 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shp8l\" (UniqueName: \"kubernetes.io/projected/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-kube-api-access-shp8l\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.426615 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.428978 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.429868 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.430175 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-srv-cert\") pod 
\"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.430453 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/480e820a-fab4-451e-9aaa-97137963b98c-profile-collector-cert\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.430717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-metrics-certs\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.431162 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.431355 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.431436 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-apiservice-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.431492 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e7d5272-a80d-42b6-9596-66894f68735d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.431877 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.432661 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90d1be85-7861-4c27-9cd1-3acf3b0721b4-webhook-cert\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.435574 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.439197 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.447818 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-default-certificate\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.452927 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ac840539-c3f6-424e-8203-6f2572a87b71-stats-auth\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.459774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d327afb3-9eeb-4835-a538-27402e1f5366-metrics-tls\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.463977 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b57gg\" (UniqueName: \"kubernetes.io/projected/90d1be85-7861-4c27-9cd1-3acf3b0721b4-kube-api-access-b57gg\") pod \"packageserver-d55dfcdfc-rbgb5\" (UID: \"90d1be85-7861-4c27-9cd1-3acf3b0721b4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.470383 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-t8blt"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.472384 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w64gv\" (UniqueName: \"kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv\") pod \"marketplace-operator-79b997595-mlcb5\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.485362 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.501596 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.508920 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.509726 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5czk\" (UniqueName: \"kubernetes.io/projected/1baa8a85-62eb-48bf-a950-e4ad8c14b0c1-kube-api-access-m5czk\") pod \"multus-admission-controller-857f4d67dd-hkvrt\" (UID: \"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.517609 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.527359 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.539452 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr2hk\" (UniqueName: \"kubernetes.io/projected/c82b6b23-4886-4ebd-a336-8dcef2d754fe-kube-api-access-xr2hk\") pod \"olm-operator-6b444d44fb-t7qhx\" (UID: \"c82b6b23-4886-4ebd-a336-8dcef2d754fe\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.539574 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.539659 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.039639516 +0000 UTC m=+144.643052587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.540511 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp4jt\" (UniqueName: \"kubernetes.io/projected/ad873970-06ce-4687-ac85-d245367b4d76-kube-api-access-sp4jt\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.540638 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9ckb\" (UniqueName: \"kubernetes.io/projected/e4a6faab-f982-47f5-a7e3-c26e4e65800f-kube-api-access-h9ckb\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.540750 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7hpz\" (UniqueName: \"kubernetes.io/projected/ffc8b006-a506-4534-a702-96aeb99f3505-kube-api-access-m7hpz\") pod \"ingress-canary-j2ht8\" (UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.540929 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-node-bootstrap-token\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.541051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.541308 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-metrics-tls\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.541597 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-plugins-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.542257 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
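[annotation] The paired failures above (UnmountVolume.TearDown for the terminating pod 8f668bae-612b-4b75-9490-919e737c6a3b and MountVolume.MountDevice for image-registry-697d97f7c8-lh9w9) share one cause: the kubelet cannot build a CSI client because kubevirt.io.hostpath-provisioner has not registered on this node yet, and the csi-hostpathplugin-wvmgv pod that provides it is itself only now having its volumes mounted. A minimal sketch of the lookup pattern behind the message, with hypothetical type and method names (not the kubelet's actual code):

package main

import (
	"fmt"
	"sync"
)

// csiRegistry is a stand-in for the kubelet's view of node-registered
// CSI drivers: driver name -> plugin endpoint, populated only after the
// node plugin announces itself over the plugin-registration socket.
type csiRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string
}

func (r *csiRegistry) endpoint(driver string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[driver]
	if !ok {
		// Mirrors the log: the driver simply is not registered yet.
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driver)
	}
	return ep, nil
}

func main() {
	reg := &csiRegistry{drivers: map[string]string{}}
	if _, err := reg.endpoint("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("volume operation fails fast:", err)
	}
}

Until registration happens, every mount and unmount against the driver fails fast like this and is left to the retry machinery below.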
No retries permitted until 2025-11-25 14:27:33.042247507 +0000 UTC m=+144.645660578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.541316 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gvqsd"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542559 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-certs\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542655 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-socket-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542668 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-plugins-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542710 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9phqs\" (UniqueName: \"kubernetes.io/projected/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-kube-api-access-9phqs\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542726 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-mountpoint-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542725 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-socket-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.542769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-registration-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.545143 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-mountpoint-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.547634 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-csi-data-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.547795 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-config-volume\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.547855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffc8b006-a506-4534-a702-96aeb99f3505-cert\") pod \"ingress-canary-j2ht8\" (UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.550517 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-metrics-tls\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.552400 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-registration-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.552688 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-config-volume\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.552956 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.552995 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ad873970-06ce-4687-ac85-d245367b4d76-csi-data-dir\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.560361 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffc8b006-a506-4534-a702-96aeb99f3505-cert\") pod \"ingress-canary-j2ht8\" 
(UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.577354 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-node-bootstrap-token\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.582286 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e7d5272-a80d-42b6-9596-66894f68735d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkcmg\" (UID: \"7e7d5272-a80d-42b6-9596-66894f68735d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.587667 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e4a6faab-f982-47f5-a7e3-c26e4e65800f-certs\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.600996 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2md4f\" (UniqueName: \"kubernetes.io/projected/f28e079b-4592-4e5e-a59f-d2a3bab40e6a-kube-api-access-2md4f\") pod \"control-plane-machine-set-operator-78cbb6b69f-pznxn\" (UID: \"f28e079b-4592-4e5e-a59f-d2a3bab40e6a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.630226 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-4fqjp"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.632222 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"] Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.639335 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxw5d\" (UniqueName: \"kubernetes.io/projected/d327afb3-9eeb-4835-a538-27402e1f5366-kube-api-access-mxw5d\") pod \"ingress-operator-5b745b69d9-lg6gr\" (UID: \"d327afb3-9eeb-4835-a538-27402e1f5366\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.642188 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqfh5\" (UniqueName: \"kubernetes.io/projected/480e820a-fab4-451e-9aaa-97137963b98c-kube-api-access-zqfh5\") pod \"catalog-operator-68c6474976-2xvfx\" (UID: \"480e820a-fab4-451e-9aaa-97137963b98c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.647602 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-xhsqg\" (UID: \"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.648628 4879 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.648816 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.148782345 +0000 UTC m=+144.752195416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.648969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.649344 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.149330231 +0000 UTC m=+144.752743292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.665755 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94jmt\" (UniqueName: \"kubernetes.io/projected/ac840539-c3f6-424e-8203-6f2572a87b71-kube-api-access-94jmt\") pod \"router-default-5444994796-2wdzj\" (UID: \"ac840539-c3f6-424e-8203-6f2572a87b71\") " pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.689420 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkfdj\" (UniqueName: \"kubernetes.io/projected/cf38f501-7552-4ec9-a1d5-ade9065c2c1b-kube-api-access-kkfdj\") pod \"kube-storage-version-migrator-operator-b67b599dd-jr5bb\" (UID: \"cf38f501-7552-4ec9-a1d5-ade9065c2c1b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.706453 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.713170 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.728353 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.735881 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shp8l\" (UniqueName: \"kubernetes.io/projected/0d0f8a3f-2831-4fcc-8fc3-726385de6bbd-kube-api-access-shp8l\") pod \"openshift-controller-manager-operator-756b6f6bc6-8qs2b\" (UID: \"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.736921 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.750402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.750560 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.250543934 +0000 UTC m=+144.853957005 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.750735 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.751028 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.251020236 +0000 UTC m=+144.854433307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.751405 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.760705 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cr9m\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.764744 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.766300 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp4jt\" (UniqueName: \"kubernetes.io/projected/ad873970-06ce-4687-ac85-d245367b4d76-kube-api-access-sp4jt\") pod \"csi-hostpathplugin-wvmgv\" (UID: \"ad873970-06ce-4687-ac85-d245367b4d76\") " pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.773379 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" Nov 25 14:27:32 crc kubenswrapper[4879]: W1125 14:27:32.784349 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fce6f2a_2662_4b9c_a631_9e590fc697cb.slice/crio-6ba0defdec3820e199cb74523e7c25a5da9a8e2e927aa758be1f343abc29c268 WatchSource:0}: Error finding container 6ba0defdec3820e199cb74523e7c25a5da9a8e2e927aa758be1f343abc29c268: Status 404 returned error can't find the container with id 6ba0defdec3820e199cb74523e7c25a5da9a8e2e927aa758be1f343abc29c268 Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.785561 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.787190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9ckb\" (UniqueName: \"kubernetes.io/projected/e4a6faab-f982-47f5-a7e3-c26e4e65800f-kube-api-access-h9ckb\") pod \"machine-config-server-m5wkj\" (UID: \"e4a6faab-f982-47f5-a7e3-c26e4e65800f\") " pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.803005 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7hpz\" (UniqueName: \"kubernetes.io/projected/ffc8b006-a506-4534-a702-96aeb99f3505-kube-api-access-m7hpz\") pod \"ingress-canary-j2ht8\" (UID: \"ffc8b006-a506-4534-a702-96aeb99f3505\") " pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.825943 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.826054 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9phqs\" (UniqueName: \"kubernetes.io/projected/5c3f4126-b5a2-4de8-88d8-434b9cf9430f-kube-api-access-9phqs\") pod \"dns-default-7mq24\" (UID: \"5c3f4126-b5a2-4de8-88d8-434b9cf9430f\") " pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.851226 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.851466 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.851635 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.351612933 +0000 UTC m=+144.955026004 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.851721 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.852142 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.352110917 +0000 UTC m=+144.955523988 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.865420 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-m5wkj" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.875495 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j2ht8" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.885954 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.954500 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.954670 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.454648436 +0000 UTC m=+145.058061507 (durationBeforeRetry 500ms). 
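[annotation] Each of these failures is gated by nestedpendingoperations.go: a volume operation that just errored may not be retried before lastErrorTime + durationBeforeRetry, which is why every message carries a "No retries permitted until ..." timestamp exactly 500ms in the future. A sketch of that gate under assumed field names (the real implementation lives in the kubelet's nested pending operations and exponential-backoff code):

package main

import (
	"fmt"
	"time"
)

// pendingOp records when an operation on a volume last failed and how
// long the caller must wait before the next attempt is permitted.
type pendingOp struct {
	lastErrorAt time.Time
	backoff     time.Duration
}

// mayRetry reports an error, in the log's own phrasing, while the
// backoff window is still open.
func (p pendingOp) mayRetry(now time.Time) error {
	notBefore := p.lastErrorAt.Add(p.backoff)
	if now.Before(notBefore) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			notBefore.Format(time.RFC3339Nano), p.backoff)
	}
	return nil
}

func main() {
	op := pendingOp{lastErrorAt: time.Now(), backoff: 500 * time.Millisecond}
	if err := op.mayRetry(time.Now()); err != nil {
		fmt.Println(err)
	}
}

The reconciler keeps re-queuing the volume on every pass; the gate is what spaces the visible attempts roughly 100-500ms apart in the timestamps above.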
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.954806 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:32 crc kubenswrapper[4879]: E1125 14:27:32.955187 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.45517199 +0000 UTC m=+145.058585061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:32 crc kubenswrapper[4879]: I1125 14:27:32.958776 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.020557 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-s598b"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.021402 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-s72fm"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.027236 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.034980 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.053796 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.056318 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.056841 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.556824966 +0000 UTC m=+145.160238037 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: W1125 14:27:33.112443 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfeab3d6_0236_4fde_8615_3b942e381dec.slice/crio-adbe38093996d083336dc05719adfbb0fcbdef1d6b9e174092cbb3c6d227b316 WatchSource:0}: Error finding container adbe38093996d083336dc05719adfbb0fcbdef1d6b9e174092cbb3c6d227b316: Status 404 returned error can't find the container with id adbe38093996d083336dc05719adfbb0fcbdef1d6b9e174092cbb3c6d227b316 Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.158922 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.159310 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.659293723 +0000 UTC m=+145.262706794 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.185315 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-4xk5z"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.197840 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.203850 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cdwf8"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.260164 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.262765 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-25 14:27:33.762736518 +0000 UTC m=+145.366149589 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: W1125 14:27:33.332681 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2aa7a228_3e61_4072_ae6d_5ebf0fa16264.slice/crio-39b94b6efcef89fdac3a81f725db4cbc38cf3fea51bec567451427913842c42b WatchSource:0}: Error finding container 39b94b6efcef89fdac3a81f725db4cbc38cf3fea51bec567451427913842c42b: Status 404 returned error can't find the container with id 39b94b6efcef89fdac3a81f725db4cbc38cf3fea51bec567451427913842c42b Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.363820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.364282 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.864261279 +0000 UTC m=+145.467674350 (durationBeforeRetry 500ms). 
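[annotation] The manager.go:1169 warnings interleaved here ("Failed to process watch event ... Status 404 returned error can't find the container with id ...") are a separate, benign race: cAdvisor sees a new crio-<id> cgroup appear, but the runtime lookup for that container ID returns 404 because the container is not yet (or no longer) known, and state is reconciled on a later pass. A sketch of tolerating that race, with stand-in names:

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("no such container")

// lookupContainer stands in for the runtime query that produced the
// "Status 404" in the log above.
func lookupContainer(id string) error {
	return fmt.Errorf("%w: %s", errNotFound, id)
}

func handleCgroupEvent(id string) {
	if err := lookupContainer(id); errors.Is(err, errNotFound) {
		// Logged as a warning, not acted on: a later relist pass
		// picks the container up once the runtime knows about it.
		fmt.Println("W Failed to process watch event:", err)
	}
}

func main() { handleCgroupEvent("6ba0defdec3820e1") }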
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.410754 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-m5wkj" event={"ID":"e4a6faab-f982-47f5-a7e3-c26e4e65800f","Type":"ContainerStarted","Data":"aecc30eda53cb92d21a966cf65cb6d272d9dc5ce508f3a43883c101a263685bc"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.420406 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4d2h" event={"ID":"f601aa7e-7179-4ce4-a83f-aa0c324970e0","Type":"ContainerStarted","Data":"3199c3dc60aad498518e34db8e5a84727e2871e4a6a8ffd200ecbd97b03da752"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.430241 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" event={"ID":"5d67a050-d127-49f2-a854-b8c6ffbd0f62","Type":"ContainerStarted","Data":"b214b9020f1e5ba703cf7550946eb47bed06b42da838abfb7e361c3307a0ea82"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.430284 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" event={"ID":"5d67a050-d127-49f2-a854-b8c6ffbd0f62","Type":"ContainerStarted","Data":"19aa79738ef70cbc8b0a8ff2f22c2c89ef42bea807cadc48554f8c16e11e8597"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.439314 4879 generic.go:334] "Generic (PLEG): container finished" podID="345c57bf-b025-4628-9e6d-475c85021591" containerID="12431e9d028744e8985cb9123f2c7053c94e01c802875faff0652d2c332c46b8" exitCode=0 Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.439669 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" event={"ID":"345c57bf-b025-4628-9e6d-475c85021591","Type":"ContainerDied","Data":"12431e9d028744e8985cb9123f2c7053c94e01c802875faff0652d2c332c46b8"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.445763 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" event={"ID":"604232d4-a119-4628-99a6-0c23df51f851","Type":"ContainerStarted","Data":"a3767b0ce7c649a7a6f15267b6c79ffd9bd0149238f6029e82dffb85e621161b"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.447406 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" event={"ID":"65da0977-5b25-4eee-b7f4-c7e746bba180","Type":"ContainerStarted","Data":"0a2f5b3f13b694fbbdb165c5b6ff68fcd4c844621b8d2821f9569ce3c6c2c9dd"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.447538 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" event={"ID":"65da0977-5b25-4eee-b7f4-c7e746bba180","Type":"ContainerStarted","Data":"ff33ba712ea285c0a58475bbddaf74ad5566458e8c8de65ce54f7e3c6f009a94"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.452884 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" event={"ID":"dfeab3d6-0236-4fde-8615-3b942e381dec","Type":"ContainerStarted","Data":"adbe38093996d083336dc05719adfbb0fcbdef1d6b9e174092cbb3c6d227b316"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.461223 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" event={"ID":"f8acacef-b1cb-4832-b107-964e0325cb17","Type":"ContainerStarted","Data":"ef45d64cdc33151ea54ef12fbea3a4567c2d6c7fe6aba39e752481faf7c819fa"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.463104 4879 generic.go:334] "Generic (PLEG): container finished" podID="a96b9df3-d517-42ff-9217-faa4350f6c9d" containerID="b9774b8610fbd9a242438e54468e87f73e413d64fb054d1af7499b49122134f4" exitCode=0 Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.463159 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" event={"ID":"a96b9df3-d517-42ff-9217-faa4350f6c9d","Type":"ContainerDied","Data":"b9774b8610fbd9a242438e54468e87f73e413d64fb054d1af7499b49122134f4"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.466971 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.468676 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.968652169 +0000 UTC m=+145.572065240 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.469459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" event={"ID":"8fce6f2a-2662-4b9c-a631-9e590fc697cb","Type":"ContainerStarted","Data":"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.469495 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" event={"ID":"8fce6f2a-2662-4b9c-a631-9e590fc697cb","Type":"ContainerStarted","Data":"6ba0defdec3820e199cb74523e7c25a5da9a8e2e927aa758be1f343abc29c268"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.471850 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.472340 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:33.97232682 +0000 UTC m=+145.575739891 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.480465 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-t8blt" event={"ID":"37ee675a-61f3-4b5f-bff5-e6714462238d","Type":"ContainerStarted","Data":"2bd5b400f0dbd9b5b4f42eafdbce5a3e94896704ad05e5100e8711cda96ef4c7"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.480513 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-t8blt" event={"ID":"37ee675a-61f3-4b5f-bff5-e6714462238d","Type":"ContainerStarted","Data":"3a64fdcd177ae63d96c7e4b9279b05151496dc189da864287364bee4f77eef97"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.481222 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.497787 4879 patch_prober.go:28] interesting pod/console-operator-58897d9998-t8blt container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.497839 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-t8blt" podUID="37ee675a-61f3-4b5f-bff5-e6714462238d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.577111 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.577498 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.07746124 +0000 UTC m=+145.680874311 (durationBeforeRetry 500ms). 
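[annotation] The console-operator readiness failure a few lines up is the normal startup race rather than a probe bug: the container had only just started ("ContainerStarted" immediately before), so the first probe hit https://10.217.0.11:8443/readyz before anything was listening and got "connection refused"; the kubelet simply probes again on the next period. An HTTP probe counts any status in the 200-399 range as success. A self-contained sketch of such a check against the /readyz endpoint taken from the log (certificate verification is skipped purely to keep the illustration short; the kubelet's probe handling is more involved):

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

func ready(url string) error {
	client := &http.Client{
		Timeout: time.Second,
		Transport: &http.Transport{
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "connect: connection refused" while the server starts
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: %s", resp.Status)
	}
	return nil
}

func main() {
	fmt.Println(ready("https://10.217.0.11:8443/readyz"))
}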
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.633758 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.679322 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.679625 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.179610718 +0000 UTC m=+145.783023789 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704232 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2wdzj" event={"ID":"ac840539-c3f6-424e-8203-6f2572a87b71","Type":"ContainerStarted","Data":"0b4b1601700eb3dcc23a30c9f819303290ed84ad45a7e3594480a7f580ecaf4e"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704293 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704306 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704317 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" event={"ID":"94a3dcbc-eef1-4a71-82b2-5be7e1008428","Type":"ContainerStarted","Data":"dbf28ad6bf9ee533ffe485ca5107020e0389bb09a388e6c7227771996b9cb0ff"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704331 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" event={"ID":"0a460467-6ab5-46a6-81c0-3647f0761451","Type":"ContainerStarted","Data":"e537640b6995f6c145d86b2fcc2ab1c39a3dfa54cbbd52675c90f442d663ce87"} Nov 25 14:27:33 
crc kubenswrapper[4879]: I1125 14:27:33.704349 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" event={"ID":"af3665e8-e0e8-4a8b-ad9c-d38f57326203","Type":"ContainerStarted","Data":"65680d2d1c758d51e7ed58f6fcced857f90d469dbeed1318ce105d95d9cc4f3c"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704361 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" event={"ID":"c7adf9b2-6679-4186-a78f-03d673b858df","Type":"ContainerStarted","Data":"0a9dde279b953ae0931555bc2ce162cfb906f980d9fb06332d18d28d5cc50ec9"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704376 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-26pf5"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704391 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704402 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wcwhw"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704416 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704426 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.704437 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.706452 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" event={"ID":"2aa7a228-3e61-4072-ae6d-5ebf0fa16264","Type":"ContainerStarted","Data":"39b94b6efcef89fdac3a81f725db4cbc38cf3fea51bec567451427913842c42b"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.709850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" event={"ID":"796c6e04-6bb0-4119-8433-4c050955799d","Type":"ContainerStarted","Data":"87d7ac3c89d8a9ea449802e0f2bbf58547529ce55e6e82756380dc04975061b5"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.710940 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" event={"ID":"92593502-485f-47ee-aba3-392741b2740a","Type":"ContainerStarted","Data":"e30b889f7f7242b2bc6830e7603d16e46068abbe4accb0fd07e1aecec86ed667"} Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.717563 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" event={"ID":"83ced81b-e3fc-4a24-a212-4a3704e4a425","Type":"ContainerStarted","Data":"7561b09f97d0518c18a17e6e4df18af1044e4c74d1e01839d601e3381d068b58"} Nov 25 14:27:33 crc kubenswrapper[4879]: W1125 14:27:33.774621 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf15a140e_3723_4b2d_8f93_0c261e02265d.slice/crio-b9b2a801c19b4eef5e91df9166f9671481a48030d3f80cc992cf33f71b1f67f4 WatchSource:0}: Error finding 
container b9b2a801c19b4eef5e91df9166f9671481a48030d3f80cc992cf33f71b1f67f4: Status 404 returned error can't find the container with id b9b2a801c19b4eef5e91df9166f9671481a48030d3f80cc992cf33f71b1f67f4 Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.783999 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.784203 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.28407802 +0000 UTC m=+145.887491091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.795875 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.799663 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.299571321 +0000 UTC m=+145.902984392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.851342 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hkvrt"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.866886 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.882034 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.900729 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:33 crc kubenswrapper[4879]: E1125 14:27:33.901094 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.401074963 +0000 UTC m=+146.004488034 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.902836 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.938889 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.952425 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.977778 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wvmgv"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.997277 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j2ht8"] Nov 25 14:27:33 crc kubenswrapper[4879]: I1125 14:27:33.998678 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b"] Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.002654 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.003175 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.503111669 +0000 UTC m=+146.106524750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: W1125 14:27:34.017556 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod480e820a_fab4_451e_9aaa_97137963b98c.slice/crio-2014e5227c8505cf7ae2fc79999d80139b28bc87252e9cb2b7ae448938ef67ba WatchSource:0}: Error finding container 2014e5227c8505cf7ae2fc79999d80139b28bc87252e9cb2b7ae448938ef67ba: Status 404 returned error can't find the container with id 2014e5227c8505cf7ae2fc79999d80139b28bc87252e9cb2b7ae448938ef67ba Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.063619 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7mq24"] Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.104834 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.104984 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.604960999 +0000 UTC m=+146.208374070 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.105264 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.105576 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.605564326 +0000 UTC m=+146.208977467 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: W1125 14:27:34.164222 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6aaf5b7_9cd6_48e0_8dfd_cd19feb56335.slice/crio-ecb1ad8dd4a75e4172a3f16fa8404a5429c8c03aaf39497ac9d29c94cc2711ba WatchSource:0}: Error finding container ecb1ad8dd4a75e4172a3f16fa8404a5429c8c03aaf39497ac9d29c94cc2711ba: Status 404 returned error can't find the container with id ecb1ad8dd4a75e4172a3f16fa8404a5429c8c03aaf39497ac9d29c94cc2711ba Nov 25 14:27:34 crc kubenswrapper[4879]: W1125 14:27:34.168475 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d0f8a3f_2831_4fcc_8fc3_726385de6bbd.slice/crio-b2fd2a1d1fa6e6ddad7d0bafa3ddbb1344ca1de5d7f8fa8f2c225e411b7fa87d WatchSource:0}: Error finding container b2fd2a1d1fa6e6ddad7d0bafa3ddbb1344ca1de5d7f8fa8f2c225e411b7fa87d: Status 404 returned error can't find the container with id b2fd2a1d1fa6e6ddad7d0bafa3ddbb1344ca1de5d7f8fa8f2c225e411b7fa87d Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.207710 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.208531 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.708505346 +0000 UTC m=+146.311918417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.208683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.209054 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-25 14:27:34.709044601 +0000 UTC m=+146.312457672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.309734 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.310466 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.810446449 +0000 UTC m=+146.413859520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.318051 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-m88zc" podStartSLOduration=124.317998375 podStartE2EDuration="2m4.317998375s" podCreationTimestamp="2025-11-25 14:25:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.314704185 +0000 UTC m=+145.918117256" watchObservedRunningTime="2025-11-25 14:27:34.317998375 +0000 UTC m=+145.921411456" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.363966 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gvqsd" podStartSLOduration=122.363947625 podStartE2EDuration="2m2.363947625s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.360006368 +0000 UTC m=+145.963419439" watchObservedRunningTime="2025-11-25 14:27:34.363947625 +0000 UTC m=+145.967360706" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.398676 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-t8blt" podStartSLOduration=123.398660869 podStartE2EDuration="2m3.398660869s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.397789536 +0000 UTC m=+146.001202607" watchObservedRunningTime="2025-11-25 
14:27:34.398660869 +0000 UTC m=+146.002073940" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.412794 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.413519 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:34.913502503 +0000 UTC m=+146.516915574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.441507 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" podStartSLOduration=123.441475924 podStartE2EDuration="2m3.441475924s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.437705641 +0000 UTC m=+146.041118722" watchObservedRunningTime="2025-11-25 14:27:34.441475924 +0000 UTC m=+146.044889005" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.490695 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dmxk6" podStartSLOduration=123.490666962 podStartE2EDuration="2m3.490666962s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.48250955 +0000 UTC m=+146.085922631" watchObservedRunningTime="2025-11-25 14:27:34.490666962 +0000 UTC m=+146.094080033" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.515813 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.516031 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.015990251 +0000 UTC m=+146.619403322 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.516343 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.516820 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.016810354 +0000 UTC m=+146.620223425 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.617054 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.617338 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.117302337 +0000 UTC m=+146.720715418 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.718935 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.719555 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.219544108 +0000 UTC m=+146.822957179 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.776276 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" event={"ID":"f15a140e-3723-4b2d-8f93-0c261e02265d","Type":"ContainerStarted","Data":"3ab40d906910d63d8b6350a8d68910e458c1cf51dd59fab1238953c9592c42c0"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.776324 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" event={"ID":"f15a140e-3723-4b2d-8f93-0c261e02265d","Type":"ContainerStarted","Data":"b9b2a801c19b4eef5e91df9166f9671481a48030d3f80cc992cf33f71b1f67f4"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.779311 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" event={"ID":"ad873970-06ce-4687-ac85-d245367b4d76","Type":"ContainerStarted","Data":"bfa950caa9e4ab45bda7b9ee44e1654c398fde3ce7c974ca857ad3379bf09b22"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.801292 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2wdzj" event={"ID":"ac840539-c3f6-424e-8203-6f2572a87b71","Type":"ContainerStarted","Data":"de8e9ea0d5e5e18c51b1d70f58d234a470a85df0d86536c4ae23bb15010339c7"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.819847 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.821230 4879 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.321214524 +0000 UTC m=+146.924627595 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.843029 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rdfg6" podStartSLOduration=122.843010578 podStartE2EDuration="2m2.843010578s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.798909658 +0000 UTC m=+146.402322719" watchObservedRunningTime="2025-11-25 14:27:34.843010578 +0000 UTC m=+146.446423659" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.891688 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" event={"ID":"cf38f501-7552-4ec9-a1d5-ade9065c2c1b","Type":"ContainerStarted","Data":"cba4a17648196315cdedcd63567d0fea902074326ee7893d9d1e45eb4662ac84"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.891928 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" event={"ID":"cf38f501-7552-4ec9-a1d5-ade9065c2c1b","Type":"ContainerStarted","Data":"b23045c7998a01f18c772a48798771bf2ad8330a201978275f906ebe99e69786"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.906388 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" event={"ID":"480e820a-fab4-451e-9aaa-97137963b98c","Type":"ContainerStarted","Data":"2014e5227c8505cf7ae2fc79999d80139b28bc87252e9cb2b7ae448938ef67ba"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.921430 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" event={"ID":"dfeab3d6-0236-4fde-8615-3b942e381dec","Type":"ContainerStarted","Data":"2c1850eb2b6d341d76c6f0c41f4f457640beafbf70e83f3c6095e3ac7e72e7b2"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.921475 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" event={"ID":"dfeab3d6-0236-4fde-8615-3b942e381dec","Type":"ContainerStarted","Data":"83016ae73c0bc351389b7c83b01c3bc86c9ca25f1f8cc1a14588b73c37106083"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.922102 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:34 crc kubenswrapper[4879]: E1125 14:27:34.923115 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.423084766 +0000 UTC m=+147.026497827 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.928721 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" event={"ID":"345c57bf-b025-4628-9e6d-475c85021591","Type":"ContainerStarted","Data":"e87bcb0168e96c5d4b9cd5765355eca9bcf12417c690d0f60aad9acca4fc7bc4"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.929671 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jr5bb" podStartSLOduration=122.929658834 podStartE2EDuration="2m2.929658834s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.921614786 +0000 UTC m=+146.525027857" watchObservedRunningTime="2025-11-25 14:27:34.929658834 +0000 UTC m=+146.533071905" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.931018 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-2wdzj" podStartSLOduration=122.931011021 podStartE2EDuration="2m2.931011021s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.847617093 +0000 UTC m=+146.451030164" watchObservedRunningTime="2025-11-25 14:27:34.931011021 +0000 UTC m=+146.534424092" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.933032 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4d2h" event={"ID":"f601aa7e-7179-4ce4-a83f-aa0c324970e0","Type":"ContainerStarted","Data":"f3d352a9412850e5e9af693db40c56ed6a8d9bd30d0f2983ad3410383f6c13ad"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.942610 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" event={"ID":"af3665e8-e0e8-4a8b-ad9c-d38f57326203","Type":"ContainerStarted","Data":"ff6876efdc11293b35f3323fbabcc4d8476bce1669c35b9f07400144470927f9"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.948570 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" podStartSLOduration=122.948556469 podStartE2EDuration="2m2.948556469s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 
14:27:34.947920441 +0000 UTC m=+146.551333522" watchObservedRunningTime="2025-11-25 14:27:34.948556469 +0000 UTC m=+146.551969540" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.955603 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" event={"ID":"c7adf9b2-6679-4186-a78f-03d673b858df","Type":"ContainerStarted","Data":"20e3d8da21cf8032c8b6284df19c2f6e7c3715d5a944fae1ab74927265866f27"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.965247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" event={"ID":"fe845b46-f773-40be-aa00-01b29bb5fa56","Type":"ContainerStarted","Data":"f9bdcea256c2717cdbd3f50fed03d1ad20dbc5b1c4c51da6d0e1fd9b5448669f"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.965281 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" event={"ID":"fe845b46-f773-40be-aa00-01b29bb5fa56","Type":"ContainerStarted","Data":"dfcc2b1567eb8567ec004a531d9c96391ee1e3c52062c899bbb0f1f69093a0f4"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.965404 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-r4d2h" podStartSLOduration=123.965382076 podStartE2EDuration="2m3.965382076s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.96294569 +0000 UTC m=+146.566358771" watchObservedRunningTime="2025-11-25 14:27:34.965382076 +0000 UTC m=+146.568795167" Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.980185 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" event={"ID":"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe","Type":"ContainerStarted","Data":"eb646b26da63c74168c5c522b2c2506b86ca8ba6cabadbfbffeaa4d36e8dd0e7"} Nov 25 14:27:34 crc kubenswrapper[4879]: I1125 14:27:34.986896 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qmsbs" podStartSLOduration=122.98684888 podStartE2EDuration="2m2.98684888s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:34.978645257 +0000 UTC m=+146.582058328" watchObservedRunningTime="2025-11-25 14:27:34.98684888 +0000 UTC m=+146.590261951" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.010904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" event={"ID":"83ced81b-e3fc-4a24-a212-4a3704e4a425","Type":"ContainerStarted","Data":"806c30a398e7f5c9f1bf98845cb8323d9e3d4a4565ea2a7eda3e3f3b1ec4ccb7"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.026076 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.026343 4879 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.526324154 +0000 UTC m=+147.129737225 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.026694 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.028931 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.528904404 +0000 UTC m=+147.132317475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.056604 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-4fqjp" podStartSLOduration=124.056588977 podStartE2EDuration="2m4.056588977s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.051993713 +0000 UTC m=+146.655406784" watchObservedRunningTime="2025-11-25 14:27:35.056588977 +0000 UTC m=+146.660002048" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.070305 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" event={"ID":"27a360a4-5ba8-4e1c-ae54-719572fd57d0","Type":"ContainerStarted","Data":"6529869b82d0d17b6056507addb36ad788e0909e8c87529da24e8eb00a5e0775"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.070348 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" event={"ID":"27a360a4-5ba8-4e1c-ae54-719572fd57d0","Type":"ContainerStarted","Data":"226c392b0535b3ffcadeeac04d992956f00dbd7bddf5fc4a2f85eac6e6cba3f3"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.083366 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" 
event={"ID":"90d1be85-7861-4c27-9cd1-3acf3b0721b4","Type":"ContainerStarted","Data":"c633662bda013bae969cbda1fae10f7d3978cec18e41317f33d7b6921f83f44d"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.083437 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" event={"ID":"90d1be85-7861-4c27-9cd1-3acf3b0721b4","Type":"ContainerStarted","Data":"1ab2d4580a9bd149cb6be634e6cefc50e7eca78640e66c4766296c51d2b313ac"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.084244 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.091360 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" event={"ID":"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd","Type":"ContainerStarted","Data":"b2fd2a1d1fa6e6ddad7d0bafa3ddbb1344ca1de5d7f8fa8f2c225e411b7fa87d"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.092681 4879 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-rbgb5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.092717 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" podUID="90d1be85-7861-4c27-9cd1-3acf3b0721b4" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.093772 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" event={"ID":"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1","Type":"ContainerStarted","Data":"9850b7370109394234136947af29b53a2815a4161db2ed0402e7f1c641379594"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.097446 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" event={"ID":"f8acacef-b1cb-4832-b107-964e0325cb17","Type":"ContainerStarted","Data":"51bbb1cdd4373f565674cf95eb6515dd1166dfbe20a47401e6b3333e5479089f"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.098032 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.100583 4879 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-v9t69 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.101541 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.103430 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" event={"ID":"2aa7a228-3e61-4072-ae6d-5ebf0fa16264","Type":"ContainerStarted","Data":"53f622ebd4e5c685b24889b0956a129b1eaddb9a2400c036c0395b2948f1c4a6"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.105710 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" event={"ID":"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335","Type":"ContainerStarted","Data":"ecb1ad8dd4a75e4172a3f16fa8404a5429c8c03aaf39497ac9d29c94cc2711ba"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.113592 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-s2pwm" podStartSLOduration=123.113575968 podStartE2EDuration="2m3.113575968s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.109477466 +0000 UTC m=+146.712890537" watchObservedRunningTime="2025-11-25 14:27:35.113575968 +0000 UTC m=+146.716989039" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.118567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" event={"ID":"0a460467-6ab5-46a6-81c0-3647f0761451","Type":"ContainerStarted","Data":"f05562c35217249013ba59bbab8814b45fb64d287438ce609cc8164498b4d452"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.129284 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.129622 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.629601613 +0000 UTC m=+147.233014684 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.145807 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" podStartSLOduration=124.145789394 podStartE2EDuration="2m4.145789394s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.144098578 +0000 UTC m=+146.747511649" watchObservedRunningTime="2025-11-25 14:27:35.145789394 +0000 UTC m=+146.749202465" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.146292 4879 generic.go:334] "Generic (PLEG): container finished" podID="ff0875c0-a709-470e-9d3b-8ce2d527cc37" containerID="4c156ea45de0ec97ec8b7d75510d6a11a9211127f022db3c4b3d6d3d8bca92c1" exitCode=0 Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.146356 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" event={"ID":"ff0875c0-a709-470e-9d3b-8ce2d527cc37","Type":"ContainerDied","Data":"4c156ea45de0ec97ec8b7d75510d6a11a9211127f022db3c4b3d6d3d8bca92c1"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.146381 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" event={"ID":"ff0875c0-a709-470e-9d3b-8ce2d527cc37","Type":"ContainerStarted","Data":"d614809c13c259e332f10bf45a85c944fa6cc31c2125e1ae0ae3f6d458b1b709"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.150633 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j2ht8" event={"ID":"ffc8b006-a506-4534-a702-96aeb99f3505","Type":"ContainerStarted","Data":"931037c504dedb8e7b786a853590927e9f5c44d82c288f62e001a9915654b070"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.152406 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" event={"ID":"f28e079b-4592-4e5e-a59f-d2a3bab40e6a","Type":"ContainerStarted","Data":"19f2d731d4d38c48d503e73d09480498c6df0fd0e3a130074c756b5a84ee8f80"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.161864 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" event={"ID":"604232d4-a119-4628-99a6-0c23df51f851","Type":"ContainerStarted","Data":"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.162622 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.164877 4879 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gvljt container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= 
Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.164935 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.170261 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" podStartSLOduration=123.170244269 podStartE2EDuration="2m3.170244269s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.16953636 +0000 UTC m=+146.772949431" watchObservedRunningTime="2025-11-25 14:27:35.170244269 +0000 UTC m=+146.773657340" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.176893 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" event={"ID":"796c6e04-6bb0-4119-8433-4c050955799d","Type":"ContainerStarted","Data":"f1dadd8218657ea59af182c35ec6fea21152faab7d812264b12f59cf6a571beb"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.176939 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" event={"ID":"796c6e04-6bb0-4119-8433-4c050955799d","Type":"ContainerStarted","Data":"64e5cb78d2034b09aec2bb880f0673d7ca50ba36e6124f034602caa7b710e143"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.182543 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" event={"ID":"b2bed98f-606e-49cb-88fc-76a5bd20af09","Type":"ContainerStarted","Data":"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.182589 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" event={"ID":"b2bed98f-606e-49cb-88fc-76a5bd20af09","Type":"ContainerStarted","Data":"939985f53cde913ba06dbf3f2fee3d56d8444b892843d8d6d3ba208261d40f8e"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.183448 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.190315 4879 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mlcb5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.190386 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.193695 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" 
event={"ID":"7e7d5272-a80d-42b6-9596-66894f68735d","Type":"ContainerStarted","Data":"78775f487b186aec44e4840fe99364c9fa065258a07cf7675260134bc1f45134"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.193761 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" event={"ID":"7e7d5272-a80d-42b6-9596-66894f68735d","Type":"ContainerStarted","Data":"7f9fff453087d9a26896a39a33cb6a5e5f7068340429b28ca50e472df2cbe196"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.195901 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" event={"ID":"d327afb3-9eeb-4835-a538-27402e1f5366","Type":"ContainerStarted","Data":"97be018e340f16441e67a8ae2a3384a3f1c2e7d3ee8ddcfb5071033d29adf2c3"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.195933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" event={"ID":"d327afb3-9eeb-4835-a538-27402e1f5366","Type":"ContainerStarted","Data":"88d35a4b94a8f072b4c76227d0af3dbe2c0fa1fe2ad23f772730545046043f7f"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.198123 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" event={"ID":"a96b9df3-d517-42ff-9217-faa4350f6c9d","Type":"ContainerStarted","Data":"71d01c87b1cbc70cbd01ef1edc7ccd917dbefd02cc2937c1ba5bb998aaee5f55"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.206512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" event={"ID":"c82b6b23-4886-4ebd-a336-8dcef2d754fe","Type":"ContainerStarted","Data":"9fbf8ece179f68e63bce266d2baee59ce1079ab31a40f6d035e14c987dc01bee"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.206796 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" event={"ID":"c82b6b23-4886-4ebd-a336-8dcef2d754fe","Type":"ContainerStarted","Data":"187c60b395413cb1de570a14cb720e5d5448c360e40540c11bf8c1e2acc32911"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.207315 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.211168 4879 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-t7qhx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.211230 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" podUID="c82b6b23-4886-4ebd-a336-8dcef2d754fe" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.213459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mq24" event={"ID":"5c3f4126-b5a2-4de8-88d8-434b9cf9430f","Type":"ContainerStarted","Data":"3d96f171ef2198d1d1c71661e1bcdb6ed930f150d1597cfd4a1a39668d7ac0d9"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.216642 4879 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wcwhw" event={"ID":"048918ad-7966-4f28-8396-54b365083b35","Type":"ContainerStarted","Data":"42aebb2e72091d9e7d079420057fcc0e291f4f19edaaecf13c1e62afb6cb735c"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.216671 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wcwhw" event={"ID":"048918ad-7966-4f28-8396-54b365083b35","Type":"ContainerStarted","Data":"c4774bc06aeccd9b4dc0994de55a02437397fa997e29e17a01c625773a2424d3"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.216689 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.224346 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.224400 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.227233 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" podStartSLOduration=123.227221139 podStartE2EDuration="2m3.227221139s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.224899566 +0000 UTC m=+146.828312647" watchObservedRunningTime="2025-11-25 14:27:35.227221139 +0000 UTC m=+146.830634200" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.231739 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-m5wkj" event={"ID":"e4a6faab-f982-47f5-a7e3-c26e4e65800f","Type":"ContainerStarted","Data":"8acb069630a4978e1ae779d106a850c80298a2152739ee1d4e8767c1c411c384"} Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.231980 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.232055 4879 patch_prober.go:28] interesting pod/console-operator-58897d9998-t8blt container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.232087 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-t8blt" podUID="37ee675a-61f3-4b5f-bff5-e6714462238d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.232888 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.236258 4879 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-2498d container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.236343 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.252415 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.256369 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.756349562 +0000 UTC m=+147.359762703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.290088 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-4xk5z" podStartSLOduration=124.290073189 podStartE2EDuration="2m4.290073189s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.25183759 +0000 UTC m=+146.855250661" watchObservedRunningTime="2025-11-25 14:27:35.290073189 +0000 UTC m=+146.893486260" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.300630 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" podStartSLOduration=124.300608745 podStartE2EDuration="2m4.300608745s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.289568605 +0000 UTC m=+146.892981676" watchObservedRunningTime="2025-11-25 14:27:35.300608745 +0000 UTC m=+146.904021816" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.340981 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" podStartSLOduration=123.340962233 podStartE2EDuration="2m3.340962233s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.340158732 +0000 UTC m=+146.943571823" watchObservedRunningTime="2025-11-25 14:27:35.340962233 +0000 UTC m=+146.944375314" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.355504 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.356299 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.85628406 +0000 UTC m=+147.459697131 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.358383 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-s72fm" podStartSLOduration=123.358359977 podStartE2EDuration="2m3.358359977s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.318954004 +0000 UTC m=+146.922367075" watchObservedRunningTime="2025-11-25 14:27:35.358359977 +0000 UTC m=+146.961773048" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.394655 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-m5wkj" podStartSLOduration=6.394637274 podStartE2EDuration="6.394637274s" podCreationTimestamp="2025-11-25 14:27:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.39229216 +0000 UTC m=+146.995705231" watchObservedRunningTime="2025-11-25 14:27:35.394637274 +0000 UTC m=+146.998050345" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.417836 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" podStartSLOduration=123.417815654 podStartE2EDuration="2m3.417815654s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.416915689 +0000 UTC m=+147.020328760" watchObservedRunningTime="2025-11-25 14:27:35.417815654 +0000 UTC m=+147.021228725" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.454661 4879 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkcmg" podStartSLOduration=123.454644976 podStartE2EDuration="2m3.454644976s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.452780815 +0000 UTC m=+147.056193887" watchObservedRunningTime="2025-11-25 14:27:35.454644976 +0000 UTC m=+147.058058047" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.456949 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.457283 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:35.957271257 +0000 UTC m=+147.560684328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.476543 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-wcwhw" podStartSLOduration=124.476527902 podStartE2EDuration="2m4.476527902s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.476510971 +0000 UTC m=+147.079924072" watchObservedRunningTime="2025-11-25 14:27:35.476527902 +0000 UTC m=+147.079940973" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.535412 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" podStartSLOduration=123.535384253 podStartE2EDuration="2m3.535384253s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.508480171 +0000 UTC m=+147.111893242" watchObservedRunningTime="2025-11-25 14:27:35.535384253 +0000 UTC m=+147.138797324" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.556767 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" podStartSLOduration=123.55674715399999 podStartE2EDuration="2m3.556747154s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:35.529565515 +0000 UTC m=+147.132978606" 
watchObservedRunningTime="2025-11-25 14:27:35.556747154 +0000 UTC m=+147.160160225" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.562725 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.563221 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.063190079 +0000 UTC m=+147.666603210 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.667246 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.668162 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.168146444 +0000 UTC m=+147.771559515 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.709809 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.738699 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:35 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:35 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:35 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.738769 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.768804 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.769386 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.269342597 +0000 UTC m=+147.872755668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.882396 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.882752 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.382735182 +0000 UTC m=+147.986148253 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.983599 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.983860 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.483837792 +0000 UTC m=+148.087250873 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:35 crc kubenswrapper[4879]: I1125 14:27:35.984061 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:35 crc kubenswrapper[4879]: E1125 14:27:35.984544 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.484532271 +0000 UTC m=+148.087945352 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.086012 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.086554 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.586535897 +0000 UTC m=+148.189948978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.188252 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.188592 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.688580053 +0000 UTC m=+148.291993124 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.237925 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" event={"ID":"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe","Type":"ContainerStarted","Data":"e95f89a06460044030d210fa75f774ca4d75174f30ba9bc9fbf2ded101326a06"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.237971 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" event={"ID":"a7c3c1f1-0f79-4a7d-8856-b4a6bed60cfe","Type":"ContainerStarted","Data":"dc2cd36ce272d067ce311419e02361ffa57259e59ca0e9cf85f8fa7493e4daa8"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.242058 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pznxn" event={"ID":"f28e079b-4592-4e5e-a59f-d2a3bab40e6a","Type":"ContainerStarted","Data":"a52a0e01af4121493b4ce3effecbe8e3e2a4d5ebc189a02854124242f831772e"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.251098 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-sfrm8" podStartSLOduration=125.251083402 podStartE2EDuration="2m5.251083402s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.250926998 +0000 UTC m=+147.854340069" watchObservedRunningTime="2025-11-25 14:27:36.251083402 +0000 UTC m=+147.854496473" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.251962 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" event={"ID":"ff0875c0-a709-470e-9d3b-8ce2d527cc37","Type":"ContainerStarted","Data":"8facf430f3116bfa7638c99b33f6009b5b9d192d1f1e799554105c6a4886bf95"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.252292 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.259641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j2ht8" event={"ID":"ffc8b006-a506-4534-a702-96aeb99f3505","Type":"ContainerStarted","Data":"b605a535e5a2d12cb11a8a7b9ce38635ae45b6b3c39df1ea394e4ba9b650ef6e"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.265666 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" event={"ID":"fe845b46-f773-40be-aa00-01b29bb5fa56","Type":"ContainerStarted","Data":"8a805c0aba1512028d9e72038d74f038d8517030de3108f2008ea4ce4a5e9f0d"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.268994 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" 
event={"ID":"af3665e8-e0e8-4a8b-ad9c-d38f57326203","Type":"ContainerStarted","Data":"b7638905b5bc1b9ab3a6ee5d838ef7bb9f92d022be3b6bda2845379644dc395c"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.272512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" event={"ID":"480e820a-fab4-451e-9aaa-97137963b98c","Type":"ContainerStarted","Data":"3fce36732caee820ac3c0e941abb7bf3fe34de3e565b205ddd40332bdad0d360"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.273576 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.274003 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" podStartSLOduration=125.273988186 podStartE2EDuration="2m5.273988186s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.271893689 +0000 UTC m=+147.875306780" watchObservedRunningTime="2025-11-25 14:27:36.273988186 +0000 UTC m=+147.877401257" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.274723 4879 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2xvfx container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.274854 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" podUID="480e820a-fab4-451e-9aaa-97137963b98c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.277532 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" event={"ID":"c6aaf5b7-9cd6-48e0-8dfd-cd19feb56335","Type":"ContainerStarted","Data":"b6c29e66b5b3ec37d1165f9221ab79624e8cac155d5429f9acf2c9abd18b30e0"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.288769 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.288900 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.788877341 +0000 UTC m=+148.392290412 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.289045 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.290040 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.790021012 +0000 UTC m=+148.393434083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.293679 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" event={"ID":"0d0f8a3f-2831-4fcc-8fc3-726385de6bbd","Type":"ContainerStarted","Data":"ab2303f80d1b12a9a695b702869f783137f20acbfa791330554d12ab77b89fff"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.321956 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" event={"ID":"a96b9df3-d517-42ff-9217-faa4350f6c9d","Type":"ContainerStarted","Data":"991e3af02fd063301997405b683159440c7c99aada44b1672393a31766da8fd6"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.335615 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-j2ht8" podStartSLOduration=7.335578882 podStartE2EDuration="7.335578882s" podCreationTimestamp="2025-11-25 14:27:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.333656269 +0000 UTC m=+147.937069340" watchObservedRunningTime="2025-11-25 14:27:36.335578882 +0000 UTC m=+147.938991953" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.336925 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9rv62" podStartSLOduration=124.336915328 podStartE2EDuration="2m4.336915328s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.299993903 +0000 UTC m=+147.903406974" watchObservedRunningTime="2025-11-25 
14:27:36.336915328 +0000 UTC m=+147.940328409" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.339311 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" event={"ID":"2aa7a228-3e61-4072-ae6d-5ebf0fa16264","Type":"ContainerStarted","Data":"0423eeedf51282a63bb2ebdf9e27ba0b20b65cc52c7a474af7e197bc18514319"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.342505 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mq24" event={"ID":"5c3f4126-b5a2-4de8-88d8-434b9cf9430f","Type":"ContainerStarted","Data":"503806f9f75ea7f19d4166f269dddc455483ec4a90f53d08e8cc8ce448739f6c"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.342719 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7mq24" event={"ID":"5c3f4126-b5a2-4de8-88d8-434b9cf9430f","Type":"ContainerStarted","Data":"4bf3ecffb5892f1f4558765610d71bb062b062daafa531481f87d56d9ea6ee4a"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.343028 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.351575 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" event={"ID":"d327afb3-9eeb-4835-a538-27402e1f5366","Type":"ContainerStarted","Data":"6f57f724f2e987079264b42dbeba57a25eebafa3da95ba854966595dbd2aa36d"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.354532 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" event={"ID":"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1","Type":"ContainerStarted","Data":"5097956c896963b008d614f79963484d6773fe7c99413f852984aa5bf421ea8c"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.358564 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.361951 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.359021 4879 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-rbgb5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.358948 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd5t9" podStartSLOduration=124.358937566 podStartE2EDuration="2m4.358937566s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.358353901 +0000 UTC m=+147.961766972" watchObservedRunningTime="2025-11-25 14:27:36.358937566 +0000 UTC m=+147.962350637" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.362193 
4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" podUID="90d1be85-7861-4c27-9cd1-3acf3b0721b4" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.359021 4879 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mlcb5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.362230 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.359297 4879 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-t7qhx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.362255 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" podUID="c82b6b23-4886-4ebd-a336-8dcef2d754fe" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.28:8443/healthz\": dial tcp 10.217.0.28:8443: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.359323 4879 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-gvljt container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.362278 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.363945 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" event={"ID":"1baa8a85-62eb-48bf-a950-e4ad8c14b0c1","Type":"ContainerStarted","Data":"e61d95a03537ca4986d22860eb95cdaead00f692fa86119154d2759dce0f240a"} Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.390550 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.390742 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.890719881 +0000 UTC m=+148.494132952 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.392481 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.395319 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.895306226 +0000 UTC m=+148.498719297 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.447824 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" podStartSLOduration=125.447808225 podStartE2EDuration="2m5.447808225s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.444863585 +0000 UTC m=+148.048276656" watchObservedRunningTime="2025-11-25 14:27:36.447808225 +0000 UTC m=+148.051221296" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.488418 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8qs2b" podStartSLOduration=125.488394219 podStartE2EDuration="2m5.488394219s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.474024527 +0000 UTC m=+148.077437598" watchObservedRunningTime="2025-11-25 14:27:36.488394219 +0000 UTC m=+148.091807280" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.496119 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 
14:27:36.497266 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:36.997248969 +0000 UTC m=+148.600662040 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.509155 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx" podStartSLOduration=124.509136343 podStartE2EDuration="2m4.509136343s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.508582798 +0000 UTC m=+148.111995869" watchObservedRunningTime="2025-11-25 14:27:36.509136343 +0000 UTC m=+148.112549414" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.545493 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lg6gr" podStartSLOduration=124.545478771 podStartE2EDuration="2m4.545478771s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.544837804 +0000 UTC m=+148.148250885" watchObservedRunningTime="2025-11-25 14:27:36.545478771 +0000 UTC m=+148.148891842" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.597481 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.597865 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.097809505 +0000 UTC m=+148.701222586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.623360 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hkvrt" podStartSLOduration=124.62334434 podStartE2EDuration="2m4.62334434s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.583479045 +0000 UTC m=+148.186892126" watchObservedRunningTime="2025-11-25 14:27:36.62334434 +0000 UTC m=+148.226757411" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.623446 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-xhsqg" podStartSLOduration=124.623442722 podStartE2EDuration="2m4.623442722s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.62078195 +0000 UTC m=+148.224195021" watchObservedRunningTime="2025-11-25 14:27:36.623442722 +0000 UTC m=+148.226855783" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.643633 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cdwf8" podStartSLOduration=125.643614721 podStartE2EDuration="2m5.643614721s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.64138638 +0000 UTC m=+148.244799451" watchObservedRunningTime="2025-11-25 14:27:36.643614721 +0000 UTC m=+148.247027792" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.702788 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.702967 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.703425 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.203406857 +0000 UTC m=+148.806819928 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.718395 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-s598b" podStartSLOduration=124.718378845 podStartE2EDuration="2m4.718378845s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.717554412 +0000 UTC m=+148.320967483" watchObservedRunningTime="2025-11-25 14:27:36.718378845 +0000 UTC m=+148.321791916" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.720293 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7mq24" podStartSLOduration=7.720281516 podStartE2EDuration="7.720281516s" podCreationTimestamp="2025-11-25 14:27:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:36.679007874 +0000 UTC m=+148.282420955" watchObservedRunningTime="2025-11-25 14:27:36.720281516 +0000 UTC m=+148.323694587" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.721557 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:36 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:36 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:36 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.721606 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.726288 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.726685 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.805033 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.805446 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-25 14:27:37.305431163 +0000 UTC m=+148.908844234 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.817644 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.818560 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.820412 4879 patch_prober.go:28] interesting pod/apiserver-76f77b778f-k8vbb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.820493 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" podUID="a96b9df3-d517-42ff-9217-faa4350f6c9d" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.17:8443/livez\": dial tcp 10.217.0.17:8443: connect: connection refused" Nov 25 14:27:36 crc kubenswrapper[4879]: I1125 14:27:36.906047 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:36 crc kubenswrapper[4879]: E1125 14:27:36.906494 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.406475212 +0000 UTC m=+149.009888283 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.007691 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.008080 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.508060746 +0000 UTC m=+149.111473867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.109574 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.109773 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.609747191 +0000 UTC m=+149.213160262 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.110046 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.110386 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.610375539 +0000 UTC m=+149.213788610 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.211746 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.211875 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.711846559 +0000 UTC m=+149.315259630 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.212067 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.212420 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.712381314 +0000 UTC m=+149.315794395 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.313590 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.313803 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.813775022 +0000 UTC m=+149.417188093 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.313886 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9"
Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.314219 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.814207804 +0000 UTC m=+149.417620875 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.360157 4879 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-v9t69 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.32:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.360250 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.32:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.361034 4879 generic.go:334] "Generic (PLEG): container finished" podID="92593502-485f-47ee-aba3-392741b2740a" containerID="e30b889f7f7242b2bc6830e7603d16e46068abbe4accb0fd07e1aecec86ed667" exitCode=0
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.361129 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" event={"ID":"92593502-485f-47ee-aba3-392741b2740a","Type":"ContainerDied","Data":"e30b889f7f7242b2bc6830e7603d16e46068abbe4accb0fd07e1aecec86ed667"}
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.362857 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" event={"ID":"ad873970-06ce-4687-ac85-d245367b4d76","Type":"ContainerStarted","Data":"ebcfa20cb4d926cbbccf00273baf402613f9bfe9d251c377a70671485d5e3bcc"}
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.363781 4879 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-mlcb5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.363842 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.371776 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2xvfx"
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.385957 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt"
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.415390 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.415556 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.9155266 +0000 UTC m=+149.518939681 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.416406 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9"
Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.418214 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:37.918202723 +0000 UTC m=+149.521615794 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.518351 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.518917 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.018896763 +0000 UTC m=+149.622309834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.619692 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.620000 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.119987982 +0000 UTC m=+149.723401043 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.659158 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.711110 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:37 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:37 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:37 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.711210 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.720692 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.720900 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.220874567 +0000 UTC m=+149.824287638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.720963 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.721382 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.22136884 +0000 UTC m=+149.824781911 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.822616 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.822993 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.322964114 +0000 UTC m=+149.926377185 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.823858 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.824161 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.324151557 +0000 UTC m=+149.927564628 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.824597 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:27:37 crc kubenswrapper[4879]: I1125 14:27:37.925163 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:37 crc kubenswrapper[4879]: E1125 14:27:37.925828 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.425808532 +0000 UTC m=+150.029221603 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.027445 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.027866 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.527851028 +0000 UTC m=+150.131264109 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.082673 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rbgb5" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.129343 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.129773 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.62975365 +0000 UTC m=+150.233166721 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.231725 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.232229 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.732209617 +0000 UTC m=+150.335622688 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.333137 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.333400 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.833370509 +0000 UTC m=+150.436783580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.333797 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.334333 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.834317785 +0000 UTC m=+150.437730856 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.379445 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mwvsp" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.435065 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.436719 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:38.93669979 +0000 UTC m=+150.540112871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.537347 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.537734 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.037719578 +0000 UTC m=+150.641132649 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.638217 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.638502 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.138468789 +0000 UTC m=+150.741881860 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.638578 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.638889 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.13887126 +0000 UTC m=+150.742284331 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.718609 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:38 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:38 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:38 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.718709 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.740042 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.740312 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.240281789 +0000 UTC m=+150.843694870 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.841430 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.841752 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.341737318 +0000 UTC m=+150.945150399 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.880028 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.881146 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.895566 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.908415 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.946652 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.946934 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqqld\" (UniqueName: \"kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.946994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:38 crc kubenswrapper[4879]: I1125 14:27:38.947093 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:38 crc kubenswrapper[4879]: E1125 14:27:38.947237 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.447217799 +0000 UTC m=+151.050630870 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.050905 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqqld\" (UniqueName: \"kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.051319 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.051373 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.051454 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.051890 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.052244 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.552228595 +0000 UTC m=+151.155641666 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.053254 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.066476 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.093300 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"]
Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.093629 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92593502-485f-47ee-aba3-392741b2740a" containerName="collect-profiles"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.093652 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="92593502-485f-47ee-aba3-392741b2740a" containerName="collect-profiles"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.093761 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="92593502-485f-47ee-aba3-392741b2740a" containerName="collect-profiles"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.094663 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.096075 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"]
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.096250 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqqld\" (UniqueName: \"kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld\") pod \"community-operators-ts4q6\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " pod="openshift-marketplace/community-operators-ts4q6"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.102605 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.151898 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvv8l\" (UniqueName: \"kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l\") pod \"92593502-485f-47ee-aba3-392741b2740a\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") "
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.151969 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume\") pod \"92593502-485f-47ee-aba3-392741b2740a\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") "
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.152175 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.152255 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") pod \"92593502-485f-47ee-aba3-392741b2740a\" (UID: \"92593502-485f-47ee-aba3-392741b2740a\") "
Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.152374 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.652342858 +0000 UTC m=+151.255755929 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153225 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume" (OuterVolumeSpecName: "config-volume") pod "92593502-485f-47ee-aba3-392741b2740a" (UID: "92593502-485f-47ee-aba3-392741b2740a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153403 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153462 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2zql\" (UniqueName: \"kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153526 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153591 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.153713 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92593502-485f-47ee-aba3-392741b2740a-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.154098 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.654084876 +0000 UTC m=+151.257498007 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.157256 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l" (OuterVolumeSpecName: "kube-api-access-zvv8l") pod "92593502-485f-47ee-aba3-392741b2740a" (UID: "92593502-485f-47ee-aba3-392741b2740a"). InnerVolumeSpecName "kube-api-access-zvv8l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.162458 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "92593502-485f-47ee-aba3-392741b2740a" (UID: "92593502-485f-47ee-aba3-392741b2740a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.205161 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.207096 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.218700 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.226992 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.228147 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.228430 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.254958 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.255268 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.755244538 +0000 UTC m=+151.358657609 (durationBeforeRetry 500ms). 
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.255653 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.255794 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.255912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.256072 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2zql\" (UniqueName: \"kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.256224 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.256340 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.256454 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvv8l\" (UniqueName: \"kubernetes.io/projected/92593502-485f-47ee-aba3-392741b2740a-kube-api-access-zvv8l\") on node \"crc\" DevicePath \"\""
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.256531 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92593502-485f-47ee-aba3-392741b2740a-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.256886 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.756838731 +0000 UTC m=+151.360251802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.257130 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.257502 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.265995 4879 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.269088 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-85g2l"]
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.270311 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85g2l"
Need to start a new one" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.287390 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2zql\" (UniqueName: \"kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql\") pod \"certified-operators-8s9x8\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.303025 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-85g2l"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.339613 4879 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-26pf5 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.339656 4879 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-26pf5 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.339691 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" podUID="ff0875c0-a709-470e-9d3b-8ce2d527cc37" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.339715 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" podUID="ff0875c0-a709-470e-9d3b-8ce2d527cc37" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357098 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357318 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swclg\" (UniqueName: \"kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357371 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: 
\"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.357409 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.857388567 +0000 UTC m=+151.460801628 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357533 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357560 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357639 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357672 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.357793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.357842 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.857831888 +0000 UTC m=+151.461244959 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.396827 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.399212 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" event={"ID":"ad873970-06ce-4687-ac85-d245367b4d76","Type":"ContainerStarted","Data":"9f05146f6060c3777c01e4d7498ec7524a8c768321165b8bdd81eeb682468267"} Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.399251 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" event={"ID":"ad873970-06ce-4687-ac85-d245367b4d76","Type":"ContainerStarted","Data":"f4fd86e7a7a885b6709efa9ba883fe019637d74ca4e58354c4995b06990e5780"} Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.402045 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.407387 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4" event={"ID":"92593502-485f-47ee-aba3-392741b2740a","Type":"ContainerDied","Data":"bc92d2bfa39a77b00df60acd7150a58477269655d7f3f94521b9b7f70401e722"} Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.407443 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc92d2bfa39a77b00df60acd7150a58477269655d7f3f94521b9b7f70401e722" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.415499 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.458763 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.460048 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.960022248 +0000 UTC m=+151.563435309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.460792 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.461939 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.475400 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swclg\" (UniqueName: \"kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.475645 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.475668 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.475739 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.477770 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.477986 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:39.977974957 +0000 UTC m=+151.581388018 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.480076 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.501155 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.529789 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swclg\" (UniqueName: \"kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg\") pod \"community-operators-85g2l\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.548393 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.583658 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.583921 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.583964 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.583994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.584013 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sv7z\" (UniqueName: 
\"kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.584081 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.584108 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.584173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.584278 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:40.084252378 +0000 UTC m=+151.687665449 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.585798 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.589261 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.593985 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.594627 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.597019 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.664331 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:27:39 crc kubenswrapper[4879]: W1125 14:27:39.667215 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda124515_51aa_4521_8a22_f5239f2afaf3.slice/crio-401d2fd6c7411621c4bb8a5b1bc5158e70a392316220415c9853e14213844249 WatchSource:0}: Error finding container 401d2fd6c7411621c4bb8a5b1bc5158e70a392316220415c9853e14213844249: Status 404 returned error can't find the container with id 401d2fd6c7411621c4bb8a5b1bc5158e70a392316220415c9853e14213844249 Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.686817 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.686885 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.686905 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.686926 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sv7z\" (UniqueName: \"kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.687510 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 14:27:40.187499477 +0000 UTC m=+151.790912548 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-lh9w9" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.688386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.688425 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.718786 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:39 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:39 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:39 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.718836 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.729488 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sv7z\" (UniqueName: \"kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z\") pod \"certified-operators-tcmmp\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.761037 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.771769 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.792718 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:39 crc kubenswrapper[4879]: E1125 14:27:39.793041 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 14:27:40.293021487 +0000 UTC m=+151.896434558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.845547 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.857018 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.883684 4879 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-25T14:27:39.266026952Z","Handler":null,"Name":""} Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.894784 4879 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.894842 4879 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.895111 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.900796 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.901290 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.901342 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.969469 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-lh9w9\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.980520 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-85g2l"] Nov 25 14:27:39 crc kubenswrapper[4879]: I1125 14:27:39.997630 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 14:27:40 crc kubenswrapper[4879]: W1125 14:27:40.019646 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c959028_cdd8_44df_b9d6_92c193b3aa6d.slice/crio-75205545a428ff3eb11ed2cd24e113ea06d25677bcacbc39254996f8452cb321 WatchSource:0}: Error finding container 75205545a428ff3eb11ed2cd24e113ea06d25677bcacbc39254996f8452cb321: Status 404 returned error can't find the container with id 75205545a428ff3eb11ed2cd24e113ea06d25677bcacbc39254996f8452cb321 Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.022729 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.191720 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.212017 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"] Nov 25 14:27:40 crc kubenswrapper[4879]: W1125 14:27:40.247744 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49d11c77_256a_4941_af43_5ca7285521d5.slice/crio-57c7e44faed79cde1356637d1f4cbd7fc0f4f052e89f23eddcc0184f2f466216 WatchSource:0}: Error finding container 57c7e44faed79cde1356637d1f4cbd7fc0f4f052e89f23eddcc0184f2f466216: Status 404 returned error can't find the container with id 57c7e44faed79cde1356637d1f4cbd7fc0f4f052e89f23eddcc0184f2f466216 Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.430952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerStarted","Data":"75205545a428ff3eb11ed2cd24e113ea06d25677bcacbc39254996f8452cb321"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.433205 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerStarted","Data":"57c7e44faed79cde1356637d1f4cbd7fc0f4f052e89f23eddcc0184f2f466216"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.448466 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" event={"ID":"ad873970-06ce-4687-ac85-d245367b4d76","Type":"ContainerStarted","Data":"bdd210f027154d4bfcce801203c7aba01ced53e0b0b18343b82f283bd8f53012"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.452306 4879 generic.go:334] "Generic (PLEG): container finished" podID="da124515-51aa-4521-8a22-f5239f2afaf3" containerID="0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa" exitCode=0 Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.452376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerDied","Data":"0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.452402 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerStarted","Data":"401d2fd6c7411621c4bb8a5b1bc5158e70a392316220415c9853e14213844249"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.457547 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6405483f-f67a-4236-8b63-16cca9a29957","Type":"ContainerStarted","Data":"717d94cd8d1253e555b3561ef4c2519580a0657f3fa8c72da5f5edccf2b83c92"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.460654 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ff1f1bcbba712fa28e5bfb6b2e0073cd75fca3848725e521e6188827cda03226"} Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.472268 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wvmgv" 
podStartSLOduration=11.472249726 podStartE2EDuration="11.472249726s" podCreationTimestamp="2025-11-25 14:27:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:40.470612291 +0000 UTC m=+152.074025372" watchObservedRunningTime="2025-11-25 14:27:40.472249726 +0000 UTC m=+152.075662797" Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.504553 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"] Nov 25 14:27:40 crc kubenswrapper[4879]: W1125 14:27:40.527657 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3baf06a_b99f_48bb_a21d_28ca86e3604f.slice/crio-8c10c35ee914a0167aead162485d4962d9f9bf97458d4a2e5f053ace230b565d WatchSource:0}: Error finding container 8c10c35ee914a0167aead162485d4962d9f9bf97458d4a2e5f053ace230b565d: Status 404 returned error can't find the container with id 8c10c35ee914a0167aead162485d4962d9f9bf97458d4a2e5f053ace230b565d Nov 25 14:27:40 crc kubenswrapper[4879]: E1125 14:27:40.578759 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c959028_cdd8_44df_b9d6_92c193b3aa6d.slice/crio-conmon-c70de75b647da965675c1c20bac71955ba94d9969e1f4a62bf734db116fdce41.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49d11c77_256a_4941_af43_5ca7285521d5.slice/crio-conmon-bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720.scope\": RecentStats: unable to find data in memory cache]" Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.629340 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.712428 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:40 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:40 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:40 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:40 crc kubenswrapper[4879]: I1125 14:27:40.712494 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.050595 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.051905 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.053687 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.061435 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.116031 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.116096 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qht2c\" (UniqueName: \"kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.116157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.217403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.217527 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.217603 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qht2c\" (UniqueName: \"kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.218058 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.218159 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities\") pod \"redhat-marketplace-q79zr\" (UID: 
\"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.238844 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qht2c\" (UniqueName: \"kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c\") pod \"redhat-marketplace-q79zr\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.351016 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-26pf5" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.394026 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.452104 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.453557 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.461810 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.472045 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"05415a8676f2e975db87e82c953eb00285a7c3dd5cac9e8a5f5bae8c42fa6e4f"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.472099 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e2aa1b72e6c5d67971edf7c9621bdca2f5b783251831b7e83e89aa5c027337d4"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.495697 4879 generic.go:334] "Generic (PLEG): container finished" podID="49d11c77-256a-4941-af43-5ca7285521d5" containerID="bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720" exitCode=0 Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.495816 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerDied","Data":"bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.497802 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.509785 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b1ba5949baa89c7bee1dd494918bbb6b93adffc7883172ff2bf71b20b9c4b812"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.509843 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3758d8d7c51e751247850397533719aab9ca871149c20c8af1d980ee64d9bd93"} Nov 25 14:27:41 crc 
kubenswrapper[4879]: I1125 14:27:41.510329 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.515572 4879 generic.go:334] "Generic (PLEG): container finished" podID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerID="9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07" exitCode=0 Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.515653 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerDied","Data":"9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.515683 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerStarted","Data":"aa54e5c818b2033ba48385e7da518aa5b1ba0de43c1470c9b9d5871b09b08a58"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.526498 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmjkq\" (UniqueName: \"kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.526539 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.526561 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.529001 4879 generic.go:334] "Generic (PLEG): container finished" podID="6405483f-f67a-4236-8b63-16cca9a29957" containerID="82084a0d815f7e993bf181011df1dcaca9e7c4eaa749b36ede10330fcc322454" exitCode=0 Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.529110 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6405483f-f67a-4236-8b63-16cca9a29957","Type":"ContainerDied","Data":"82084a0d815f7e993bf181011df1dcaca9e7c4eaa749b36ede10330fcc322454"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.535031 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"35270588307b044164a33f3775779f0a809bb862942c0219236a22e342362f81"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.550090 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerID="c70de75b647da965675c1c20bac71955ba94d9969e1f4a62bf734db116fdce41" exitCode=0 Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 
14:27:41.550212 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerDied","Data":"c70de75b647da965675c1c20bac71955ba94d9969e1f4a62bf734db116fdce41"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.556910 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" event={"ID":"b3baf06a-b99f-48bb-a21d-28ca86e3604f","Type":"ContainerStarted","Data":"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.556942 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" event={"ID":"b3baf06a-b99f-48bb-a21d-28ca86e3604f","Type":"ContainerStarted","Data":"8c10c35ee914a0167aead162485d4962d9f9bf97458d4a2e5f053ace230b565d"} Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.556956 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.627377 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmjkq\" (UniqueName: \"kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.627435 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.627458 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.628764 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.629029 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.651017 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmjkq\" (UniqueName: \"kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq\") pod \"redhat-marketplace-gc5xf\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.653388 4879 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.673501 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" podStartSLOduration=129.673463034 podStartE2EDuration="2m9.673463034s" podCreationTimestamp="2025-11-25 14:25:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:41.671990563 +0000 UTC m=+153.275403624" watchObservedRunningTime="2025-11-25 14:27:41.673463034 +0000 UTC m=+153.276876105" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.712170 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.720311 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:41 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:41 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:41 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.720360 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.770990 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.823092 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:41 crc kubenswrapper[4879]: I1125 14:27:41.827044 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-k8vbb" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.027428 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-t8blt" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.056333 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.057424 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.059481 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.075256 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.133869 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.133909 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.135094 4879 patch_prober.go:28] interesting pod/console-f9d7485db-r4d2h container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.135185 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-r4d2h" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.136266 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.136354 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.136384 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwzdg\" (UniqueName: \"kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.256820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.256896 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.256921 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwzdg\" (UniqueName: \"kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.257506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.257582 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.281912 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwzdg\" (UniqueName: \"kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg\") pod \"redhat-operators-cff9c\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.306509 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.306637 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.307302 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.307321 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.314876 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.382194 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.452366 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.453384 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.469569 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.525955 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.559926 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.560022 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwnxj\" (UniqueName: \"kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.560226 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.598137 4879 generic.go:334] "Generic (PLEG): container finished" podID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerID="5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8" exitCode=0 Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.598232 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerDied","Data":"5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8"} Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.598257 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerStarted","Data":"39c21192c67bb9cadd961581d871dd681874391069884e124d67bc9eb47ffc93"} Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.603630 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerStarted","Data":"8f811a7cf6c3da4c64b5f4826e2570ad2a53ebe05e47bebf4bf19bf1af55b0cd"} Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.661930 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.662021 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities\") pod 
\"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.662086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwnxj\" (UniqueName: \"kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.662740 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.665417 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.703140 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwnxj\" (UniqueName: \"kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj\") pod \"redhat-operators-5rxmf\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.707713 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.711995 4879 patch_prober.go:28] interesting pod/router-default-5444994796-2wdzj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 14:27:42 crc kubenswrapper[4879]: [-]has-synced failed: reason withheld Nov 25 14:27:42 crc kubenswrapper[4879]: [+]process-running ok Nov 25 14:27:42 crc kubenswrapper[4879]: healthz check failed Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.712372 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2wdzj" podUID="ac840539-c3f6-424e-8203-6f2572a87b71" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.779277 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.793782 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-t7qhx" Nov 25 14:27:42 crc kubenswrapper[4879]: I1125 14:27:42.980069 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.029649 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.070396 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir\") pod \"6405483f-f67a-4236-8b63-16cca9a29957\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.070514 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6405483f-f67a-4236-8b63-16cca9a29957" (UID: "6405483f-f67a-4236-8b63-16cca9a29957"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.070564 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access\") pod \"6405483f-f67a-4236-8b63-16cca9a29957\" (UID: \"6405483f-f67a-4236-8b63-16cca9a29957\") " Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.071154 4879 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6405483f-f67a-4236-8b63-16cca9a29957-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.076499 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6405483f-f67a-4236-8b63-16cca9a29957" (UID: "6405483f-f67a-4236-8b63-16cca9a29957"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.099230 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.173145 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6405483f-f67a-4236-8b63-16cca9a29957-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:27:43 crc kubenswrapper[4879]: W1125 14:27:43.175934 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2104658_98e8_415f_9c56_948de1dc2610.slice/crio-10c51f31b2a8eb7765b8a125b0d13ba3079a431c84914395364a13ce903b682e WatchSource:0}: Error finding container 10c51f31b2a8eb7765b8a125b0d13ba3079a431c84914395364a13ce903b682e: Status 404 returned error can't find the container with id 10c51f31b2a8eb7765b8a125b0d13ba3079a431c84914395364a13ce903b682e Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.632316 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.632558 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6405483f-f67a-4236-8b63-16cca9a29957","Type":"ContainerDied","Data":"717d94cd8d1253e555b3561ef4c2519580a0657f3fa8c72da5f5edccf2b83c92"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.632728 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="717d94cd8d1253e555b3561ef4c2519580a0657f3fa8c72da5f5edccf2b83c92" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.635645 4879 generic.go:334] "Generic (PLEG): container finished" podID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerID="6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03" exitCode=0 Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.635779 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerDied","Data":"6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.635811 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerStarted","Data":"14239db5b1bce35330727ec93b43186974c753983449d1ce39c4e28144684a80"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.650498 4879 generic.go:334] "Generic (PLEG): container finished" podID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerID="b3c89bcaf5caccc21595e977d6ed210497b55f5f99f14e22bf4cb59fc106c714" exitCode=0 Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.654083 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2104658-98e8-415f-9c56-948de1dc2610" containerID="108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d" exitCode=0 Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.655101 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerDied","Data":"b3c89bcaf5caccc21595e977d6ed210497b55f5f99f14e22bf4cb59fc106c714"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.655216 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerDied","Data":"108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.655233 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerStarted","Data":"10c51f31b2a8eb7765b8a125b0d13ba3079a431c84914395364a13ce903b682e"} Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.716568 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.725573 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-2wdzj" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.951573 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] 
Nov 25 14:27:43 crc kubenswrapper[4879]: E1125 14:27:43.951833 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6405483f-f67a-4236-8b63-16cca9a29957" containerName="pruner" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.951849 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6405483f-f67a-4236-8b63-16cca9a29957" containerName="pruner" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.951994 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6405483f-f67a-4236-8b63-16cca9a29957" containerName="pruner" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.952430 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.956061 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.956618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 25 14:27:43 crc kubenswrapper[4879]: I1125 14:27:43.959355 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.099901 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.100339 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.204981 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.205054 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.205139 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.239077 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access\") pod \"revision-pruner-8-crc\" 
(UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.291906 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:44 crc kubenswrapper[4879]: I1125 14:27:44.897931 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 25 14:27:44 crc kubenswrapper[4879]: W1125 14:27:44.926205 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod653c7beb_0b48_4f5f_bbc0_6d2e96ff8820.slice/crio-6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4 WatchSource:0}: Error finding container 6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4: Status 404 returned error can't find the container with id 6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4 Nov 25 14:27:45 crc kubenswrapper[4879]: I1125 14:27:45.696333 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820","Type":"ContainerStarted","Data":"6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4"} Nov 25 14:27:46 crc kubenswrapper[4879]: I1125 14:27:46.743674 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820","Type":"ContainerStarted","Data":"2f16e10ac04269ace9cf5885a5c14d35c14c25ee06d0dff6d7bae5a6a8eead8d"} Nov 25 14:27:47 crc kubenswrapper[4879]: I1125 14:27:47.409214 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:27:47 crc kubenswrapper[4879]: I1125 14:27:47.409549 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:27:47 crc kubenswrapper[4879]: I1125 14:27:47.758820 4879 generic.go:334] "Generic (PLEG): container finished" podID="653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" containerID="2f16e10ac04269ace9cf5885a5c14d35c14c25ee06d0dff6d7bae5a6a8eead8d" exitCode=0 Nov 25 14:27:47 crc kubenswrapper[4879]: I1125 14:27:47.758876 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820","Type":"ContainerDied","Data":"2f16e10ac04269ace9cf5885a5c14d35c14c25ee06d0dff6d7bae5a6a8eead8d"} Nov 25 14:27:47 crc kubenswrapper[4879]: I1125 14:27:47.889943 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7mq24" Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.133813 4879 patch_prober.go:28] interesting pod/console-f9d7485db-r4d2h container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.134384 4879 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-console/console-f9d7485db-r4d2h" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.304918 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.304985 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.305050 4879 patch_prober.go:28] interesting pod/downloads-7954f5f757-wcwhw container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" start-of-body= Nov 25 14:27:52 crc kubenswrapper[4879]: I1125 14:27:52.305074 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wcwhw" podUID="048918ad-7966-4f28-8396-54b365083b35" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.9:8080/\": dial tcp 10.217.0.9:8080: connect: connection refused" Nov 25 14:27:53 crc kubenswrapper[4879]: I1125 14:27:53.555904 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:53 crc kubenswrapper[4879]: I1125 14:27:53.561232 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/960966b7-77d2-49d8-bfcc-2aa44e032f8c-metrics-certs\") pod \"network-metrics-daemon-48cv4\" (UID: \"960966b7-77d2-49d8-bfcc-2aa44e032f8c\") " pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:53 crc kubenswrapper[4879]: I1125 14:27:53.664589 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-48cv4" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.213356 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.265353 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir\") pod \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.265681 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access\") pod \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\" (UID: \"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820\") " Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.267368 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" (UID: "653c7beb-0b48-4f5f-bbc0-6d2e96ff8820"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.269556 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" (UID: "653c7beb-0b48-4f5f-bbc0-6d2e96ff8820"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.366686 4879 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.367005 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653c7beb-0b48-4f5f-bbc0-6d2e96ff8820-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.827493 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"653c7beb-0b48-4f5f-bbc0-6d2e96ff8820","Type":"ContainerDied","Data":"6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4"} Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.827543 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ca1e9a7a1a7d617894de8df24f90c9107100b5e01483b7e39ae5f6c499006b4" Nov 25 14:27:54 crc kubenswrapper[4879]: I1125 14:27:54.827684 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 14:27:55 crc kubenswrapper[4879]: I1125 14:27:55.268956 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-48cv4"] Nov 25 14:27:55 crc kubenswrapper[4879]: W1125 14:27:55.277375 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod960966b7_77d2_49d8_bfcc_2aa44e032f8c.slice/crio-70a7b682554b757a0a532c14104e8bcd172e9397af8c95fc8768d2e0d1828539 WatchSource:0}: Error finding container 70a7b682554b757a0a532c14104e8bcd172e9397af8c95fc8768d2e0d1828539: Status 404 returned error can't find the container with id 70a7b682554b757a0a532c14104e8bcd172e9397af8c95fc8768d2e0d1828539 Nov 25 14:27:55 crc kubenswrapper[4879]: I1125 14:27:55.835522 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-48cv4" event={"ID":"960966b7-77d2-49d8-bfcc-2aa44e032f8c","Type":"ContainerStarted","Data":"eed4018c44ddd1e290463487d530740362960741375e9dbaf3bcbe883131668e"} Nov 25 14:27:55 crc kubenswrapper[4879]: I1125 14:27:55.835590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-48cv4" event={"ID":"960966b7-77d2-49d8-bfcc-2aa44e032f8c","Type":"ContainerStarted","Data":"70a7b682554b757a0a532c14104e8bcd172e9397af8c95fc8768d2e0d1828539"} Nov 25 14:27:57 crc kubenswrapper[4879]: I1125 14:27:57.846164 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-48cv4" event={"ID":"960966b7-77d2-49d8-bfcc-2aa44e032f8c","Type":"ContainerStarted","Data":"19cc9221e426f50f4aa7d57ad8035e144ed771c9af0d80ca6ec731b6bed01713"} Nov 25 14:27:57 crc kubenswrapper[4879]: I1125 14:27:57.862178 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-48cv4" podStartSLOduration=146.862157031 podStartE2EDuration="2m26.862157031s" podCreationTimestamp="2025-11-25 14:25:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:27:57.859617772 +0000 UTC m=+169.463030843" watchObservedRunningTime="2025-11-25 14:27:57.862157031 +0000 UTC m=+169.465570102" Nov 25 14:28:00 crc kubenswrapper[4879]: I1125 14:28:00.204100 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:28:02 crc kubenswrapper[4879]: I1125 14:28:02.137637 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:28:02 crc kubenswrapper[4879]: I1125 14:28:02.143343 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:28:02 crc kubenswrapper[4879]: I1125 14:28:02.309494 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-wcwhw" Nov 25 14:28:11 crc kubenswrapper[4879]: I1125 14:28:11.864231 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-lw5bt" Nov 25 14:28:16 crc kubenswrapper[4879]: E1125 14:28:16.177295 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 25 14:28:16 crc kubenswrapper[4879]: E1125 14:28:16.177668 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fwnxj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-5rxmf_openshift-marketplace(d2104658-98e8-415f-9c56-948de1dc2610): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:16 crc kubenswrapper[4879]: E1125 14:28:16.178875 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-5rxmf" podUID="d2104658-98e8-415f-9c56-948de1dc2610" Nov 25 14:28:17 crc kubenswrapper[4879]: I1125 14:28:17.408720 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:28:17 crc kubenswrapper[4879]: I1125 14:28:17.408801 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:28:19 crc kubenswrapper[4879]: I1125 14:28:19.989524 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 14:28:20 crc kubenswrapper[4879]: E1125 14:28:20.182430 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 25 14:28:20 crc kubenswrapper[4879]: E1125 14:28:20.182596 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pwzdg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-cff9c_openshift-marketplace(c80e42d3-60e0-43cd-a2d5-455e7a61cd1e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:20 crc kubenswrapper[4879]: E1125 14:28:20.183799 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-cff9c" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" Nov 25 14:28:20 crc kubenswrapper[4879]: E1125 14:28:20.721185 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-5rxmf" podUID="d2104658-98e8-415f-9c56-948de1dc2610" Nov 25 14:28:21 crc kubenswrapper[4879]: E1125 14:28:21.531887 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 25 14:28:21 crc kubenswrapper[4879]: E1125 14:28:21.532069 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gqqld,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-ts4q6_openshift-marketplace(da124515-51aa-4521-8a22-f5239f2afaf3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:21 crc kubenswrapper[4879]: E1125 14:28:21.534835 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-ts4q6" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" Nov 25 14:28:22 crc kubenswrapper[4879]: E1125 14:28:22.895488 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 25 14:28:22 crc kubenswrapper[4879]: E1125 14:28:22.895688 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-swclg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-85g2l_openshift-marketplace(0c959028-cdd8-44df-b9d6-92c193b3aa6d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:22 crc kubenswrapper[4879]: E1125 14:28:22.897026 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-85g2l" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" Nov 25 14:28:23 crc kubenswrapper[4879]: E1125 14:28:23.749640 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-cff9c" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" Nov 25 14:28:23 crc kubenswrapper[4879]: E1125 14:28:23.751253 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-85g2l" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" Nov 25 14:28:27 crc kubenswrapper[4879]: E1125 14:28:27.185109 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 25 14:28:27 crc kubenswrapper[4879]: E1125 14:28:27.185794 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qht2c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-q79zr_openshift-marketplace(d022699c-c1c7-40e3-8a77-bd9dbd66b1aa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:27 crc kubenswrapper[4879]: E1125 14:28:27.189087 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-q79zr" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" Nov 25 14:28:28 crc kubenswrapper[4879]: E1125 14:28:28.022548 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-q79zr" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" Nov 25 14:28:28 crc kubenswrapper[4879]: E1125 14:28:28.660028 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 25 14:28:28 crc kubenswrapper[4879]: E1125 14:28:28.660301 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x2zql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-8s9x8_openshift-marketplace(49d11c77-256a-4941-af43-5ca7285521d5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:28 crc kubenswrapper[4879]: E1125 14:28:28.661500 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-8s9x8" podUID="49d11c77-256a-4941-af43-5ca7285521d5" Nov 25 14:28:29 crc kubenswrapper[4879]: E1125 14:28:29.028913 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-8s9x8" podUID="49d11c77-256a-4941-af43-5ca7285521d5" Nov 25 14:28:32 crc kubenswrapper[4879]: E1125 14:28:32.707758 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 25 14:28:32 crc kubenswrapper[4879]: E1125 14:28:32.708317 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tmjkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gc5xf_openshift-marketplace(06af87d7-2cc2-44cd-aa6d-f855a64ce6b9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:32 crc kubenswrapper[4879]: E1125 14:28:32.709480 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gc5xf" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" Nov 25 14:28:33 crc kubenswrapper[4879]: E1125 14:28:33.053345 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gc5xf" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" Nov 25 14:28:34 crc kubenswrapper[4879]: E1125 14:28:34.325645 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 25 14:28:34 crc kubenswrapper[4879]: E1125 14:28:34.325826 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7sv7z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-tcmmp_openshift-marketplace(2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 14:28:34 crc kubenswrapper[4879]: E1125 14:28:34.327433 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-tcmmp" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" Nov 25 14:28:35 crc kubenswrapper[4879]: E1125 14:28:35.064845 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-tcmmp" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" Nov 25 14:28:39 crc kubenswrapper[4879]: I1125 14:28:39.085429 4879 generic.go:334] "Generic (PLEG): container finished" podID="da124515-51aa-4521-8a22-f5239f2afaf3" containerID="47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98" exitCode=0 Nov 25 14:28:39 crc kubenswrapper[4879]: I1125 14:28:39.085499 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerDied","Data":"47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98"} Nov 25 14:28:39 crc kubenswrapper[4879]: I1125 14:28:39.087788 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerStarted","Data":"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93"} Nov 25 14:28:40 crc kubenswrapper[4879]: I1125 14:28:40.094169 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2104658-98e8-415f-9c56-948de1dc2610" containerID="edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93" exitCode=0 Nov 25 14:28:40 crc 
kubenswrapper[4879]: I1125 14:28:40.094216 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerDied","Data":"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93"} Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.101865 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerID="ed8b4d5e67e9f95d0c350a7803708dcfb86b7739d3a4ed990e4ac55eecbb65e1" exitCode=0 Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.101940 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerDied","Data":"ed8b4d5e67e9f95d0c350a7803708dcfb86b7739d3a4ed990e4ac55eecbb65e1"} Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.105221 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerStarted","Data":"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd"} Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.106751 4879 generic.go:334] "Generic (PLEG): container finished" podID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerID="3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b" exitCode=0 Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.106778 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerDied","Data":"3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b"} Nov 25 14:28:41 crc kubenswrapper[4879]: I1125 14:28:41.164872 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ts4q6" podStartSLOduration=4.870079467 podStartE2EDuration="1m3.16485207s" podCreationTimestamp="2025-11-25 14:27:38 +0000 UTC" firstStartedPulling="2025-11-25 14:27:41.560471669 +0000 UTC m=+153.163884740" lastFinishedPulling="2025-11-25 14:28:39.855244272 +0000 UTC m=+211.458657343" observedRunningTime="2025-11-25 14:28:41.161858169 +0000 UTC m=+212.765271230" watchObservedRunningTime="2025-11-25 14:28:41.16485207 +0000 UTC m=+212.768265151" Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.115163 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerStarted","Data":"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858"} Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.121100 4879 generic.go:334] "Generic (PLEG): container finished" podID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerID="ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a" exitCode=0 Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.121182 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerDied","Data":"ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a"} Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.144553 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5rxmf" podStartSLOduration=2.509471691 
podStartE2EDuration="1m0.144525521s" podCreationTimestamp="2025-11-25 14:27:42 +0000 UTC" firstStartedPulling="2025-11-25 14:27:43.656741996 +0000 UTC m=+155.260155067" lastFinishedPulling="2025-11-25 14:28:41.291795786 +0000 UTC m=+212.895208897" observedRunningTime="2025-11-25 14:28:42.139486682 +0000 UTC m=+213.742899753" watchObservedRunningTime="2025-11-25 14:28:42.144525521 +0000 UTC m=+213.747938592" Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.779612 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:42 crc kubenswrapper[4879]: I1125 14:28:42.780097 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:43 crc kubenswrapper[4879]: I1125 14:28:43.127934 4879 generic.go:334] "Generic (PLEG): container finished" podID="49d11c77-256a-4941-af43-5ca7285521d5" containerID="34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b" exitCode=0 Nov 25 14:28:43 crc kubenswrapper[4879]: I1125 14:28:43.128006 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerDied","Data":"34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b"} Nov 25 14:28:43 crc kubenswrapper[4879]: I1125 14:28:43.133626 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerStarted","Data":"7914c9eb47093902f10456e4e74302a7ed5d081e6571a63a1aa1b79f6ad3bebb"} Nov 25 14:28:43 crc kubenswrapper[4879]: I1125 14:28:43.166897 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-85g2l" podStartSLOduration=3.488356657 podStartE2EDuration="1m4.166880646s" podCreationTimestamp="2025-11-25 14:27:39 +0000 UTC" firstStartedPulling="2025-11-25 14:27:41.553761447 +0000 UTC m=+153.157174518" lastFinishedPulling="2025-11-25 14:28:42.232285436 +0000 UTC m=+213.835698507" observedRunningTime="2025-11-25 14:28:43.163556867 +0000 UTC m=+214.766969958" watchObservedRunningTime="2025-11-25 14:28:43.166880646 +0000 UTC m=+214.770293717" Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.070486 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5rxmf" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="registry-server" probeResult="failure" output=< Nov 25 14:28:44 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:28:44 crc kubenswrapper[4879]: > Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.141117 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerStarted","Data":"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01"} Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.143513 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerStarted","Data":"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9"} Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.146112 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" 
event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerStarted","Data":"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61"} Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.159553 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cff9c" podStartSLOduration=2.821344637 podStartE2EDuration="1m2.159522115s" podCreationTimestamp="2025-11-25 14:27:42 +0000 UTC" firstStartedPulling="2025-11-25 14:27:43.641247105 +0000 UTC m=+155.244660176" lastFinishedPulling="2025-11-25 14:28:42.979424583 +0000 UTC m=+214.582837654" observedRunningTime="2025-11-25 14:28:44.157000685 +0000 UTC m=+215.760413766" watchObservedRunningTime="2025-11-25 14:28:44.159522115 +0000 UTC m=+215.762935186" Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.175227 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8s9x8" podStartSLOduration=3.020396137 podStartE2EDuration="1m5.175201277s" podCreationTimestamp="2025-11-25 14:27:39 +0000 UTC" firstStartedPulling="2025-11-25 14:27:41.497483676 +0000 UTC m=+153.100896747" lastFinishedPulling="2025-11-25 14:28:43.652288816 +0000 UTC m=+215.255701887" observedRunningTime="2025-11-25 14:28:44.171986991 +0000 UTC m=+215.775400062" watchObservedRunningTime="2025-11-25 14:28:44.175201277 +0000 UTC m=+215.778614348" Nov 25 14:28:44 crc kubenswrapper[4879]: I1125 14:28:44.194376 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q79zr" podStartSLOduration=2.384542547 podStartE2EDuration="1m3.194351462s" podCreationTimestamp="2025-11-25 14:27:41 +0000 UTC" firstStartedPulling="2025-11-25 14:27:42.60373356 +0000 UTC m=+154.207146631" lastFinishedPulling="2025-11-25 14:28:43.413542475 +0000 UTC m=+215.016955546" observedRunningTime="2025-11-25 14:28:44.193227305 +0000 UTC m=+215.796640376" watchObservedRunningTime="2025-11-25 14:28:44.194351462 +0000 UTC m=+215.797764533" Nov 25 14:28:45 crc kubenswrapper[4879]: I1125 14:28:45.152951 4879 generic.go:334] "Generic (PLEG): container finished" podID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerID="3000c14f1c18b63a6d10981064194baf7055787bf866ffebc7c3206ddce6d5fd" exitCode=0 Nov 25 14:28:45 crc kubenswrapper[4879]: I1125 14:28:45.153341 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerDied","Data":"3000c14f1c18b63a6d10981064194baf7055787bf866ffebc7c3206ddce6d5fd"} Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.164572 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerStarted","Data":"bf0d8a2d0f25288eac7eb8e4bba35c305787cb46b4caf8d7a31eefeb2c94f554"} Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.187685 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gc5xf" podStartSLOduration=3.691273673 podStartE2EDuration="1m6.187650495s" podCreationTimestamp="2025-11-25 14:27:41 +0000 UTC" firstStartedPulling="2025-11-25 14:27:43.654466995 +0000 UTC m=+155.257880066" lastFinishedPulling="2025-11-25 14:28:46.150843817 +0000 UTC m=+217.754256888" observedRunningTime="2025-11-25 14:28:47.184011388 +0000 UTC m=+218.787424489" watchObservedRunningTime="2025-11-25 14:28:47.187650495 +0000 
UTC m=+218.791063606" Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.409411 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.409504 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.409581 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.410419 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:28:47 crc kubenswrapper[4879]: I1125 14:28:47.410589 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba" gracePeriod=600 Nov 25 14:28:48 crc kubenswrapper[4879]: I1125 14:28:48.170988 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba" exitCode=0 Nov 25 14:28:48 crc kubenswrapper[4879]: I1125 14:28:48.171076 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba"} Nov 25 14:28:48 crc kubenswrapper[4879]: I1125 14:28:48.171356 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb"} Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.219953 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.219993 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.274906 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.416516 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 
14:28:49.416560 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.480179 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.597920 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.597982 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:49 crc kubenswrapper[4879]: I1125 14:28:49.653362 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:50 crc kubenswrapper[4879]: I1125 14:28:50.230441 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:28:50 crc kubenswrapper[4879]: I1125 14:28:50.233104 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:50 crc kubenswrapper[4879]: I1125 14:28:50.242807 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.394896 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.395250 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.452645 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.771568 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.771632 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:51 crc kubenswrapper[4879]: I1125 14:28:51.807671 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.234616 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.238064 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.282469 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-85g2l"] Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.282748 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-85g2l" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="registry-server" containerID="cri-o://7914c9eb47093902f10456e4e74302a7ed5d081e6571a63a1aa1b79f6ad3bebb" gracePeriod=2 Nov 25 14:28:52 crc 
kubenswrapper[4879]: I1125 14:28:52.383428 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.383510 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.430744 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.822247 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:52 crc kubenswrapper[4879]: I1125 14:28:52.860043 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:53 crc kubenswrapper[4879]: I1125 14:28:53.201978 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerID="7914c9eb47093902f10456e4e74302a7ed5d081e6571a63a1aa1b79f6ad3bebb" exitCode=0 Nov 25 14:28:53 crc kubenswrapper[4879]: I1125 14:28:53.202080 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerDied","Data":"7914c9eb47093902f10456e4e74302a7ed5d081e6571a63a1aa1b79f6ad3bebb"} Nov 25 14:28:53 crc kubenswrapper[4879]: I1125 14:28:53.241254 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.372491 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.501093 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swclg\" (UniqueName: \"kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg\") pod \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.501588 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities\") pod \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.501694 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content\") pod \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\" (UID: \"0c959028-cdd8-44df-b9d6-92c193b3aa6d\") " Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.521452 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities" (OuterVolumeSpecName: "utilities") pod "0c959028-cdd8-44df-b9d6-92c193b3aa6d" (UID: "0c959028-cdd8-44df-b9d6-92c193b3aa6d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.522527 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg" (OuterVolumeSpecName: "kube-api-access-swclg") pod "0c959028-cdd8-44df-b9d6-92c193b3aa6d" (UID: "0c959028-cdd8-44df-b9d6-92c193b3aa6d"). InnerVolumeSpecName "kube-api-access-swclg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.604698 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swclg\" (UniqueName: \"kubernetes.io/projected/0c959028-cdd8-44df-b9d6-92c193b3aa6d-kube-api-access-swclg\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.604743 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:54 crc kubenswrapper[4879]: I1125 14:28:54.685512 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.214316 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-85g2l" event={"ID":"0c959028-cdd8-44df-b9d6-92c193b3aa6d","Type":"ContainerDied","Data":"75205545a428ff3eb11ed2cd24e113ea06d25677bcacbc39254996f8452cb321"} Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.214380 4879 scope.go:117] "RemoveContainer" containerID="7914c9eb47093902f10456e4e74302a7ed5d081e6571a63a1aa1b79f6ad3bebb" Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.214398 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-85g2l" Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.214665 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gc5xf" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="registry-server" containerID="cri-o://bf0d8a2d0f25288eac7eb8e4bba35c305787cb46b4caf8d7a31eefeb2c94f554" gracePeriod=2 Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.227008 4879 scope.go:117] "RemoveContainer" containerID="ed8b4d5e67e9f95d0c350a7803708dcfb86b7739d3a4ed990e4ac55eecbb65e1" Nov 25 14:28:55 crc kubenswrapper[4879]: I1125 14:28:55.239312 4879 scope.go:117] "RemoveContainer" containerID="c70de75b647da965675c1c20bac71955ba94d9969e1f4a62bf734db116fdce41" Nov 25 14:28:56 crc kubenswrapper[4879]: I1125 14:28:56.222622 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerStarted","Data":"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737"} Nov 25 14:28:56 crc kubenswrapper[4879]: I1125 14:28:56.466824 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0c959028-cdd8-44df-b9d6-92c193b3aa6d" (UID: "0c959028-cdd8-44df-b9d6-92c193b3aa6d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:56 crc kubenswrapper[4879]: I1125 14:28:56.561224 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c959028-cdd8-44df-b9d6-92c193b3aa6d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:56 crc kubenswrapper[4879]: I1125 14:28:56.742394 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-85g2l"] Nov 25 14:28:56 crc kubenswrapper[4879]: I1125 14:28:56.745585 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-85g2l"] Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.093555 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.093800 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5rxmf" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="registry-server" containerID="cri-o://196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858" gracePeriod=2 Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.228585 4879 generic.go:334] "Generic (PLEG): container finished" podID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerID="57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737" exitCode=0 Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.228648 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerDied","Data":"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737"} Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.231977 4879 generic.go:334] "Generic (PLEG): container finished" podID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerID="bf0d8a2d0f25288eac7eb8e4bba35c305787cb46b4caf8d7a31eefeb2c94f554" exitCode=0 Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.232030 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerDied","Data":"bf0d8a2d0f25288eac7eb8e4bba35c305787cb46b4caf8d7a31eefeb2c94f554"} Nov 25 14:28:57 crc kubenswrapper[4879]: I1125 14:28:57.651314 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" path="/var/lib/kubelet/pods/0c959028-cdd8-44df-b9d6-92c193b3aa6d/volumes" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.777551 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.889139 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content\") pod \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.889246 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmjkq\" (UniqueName: \"kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq\") pod \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.889283 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities\") pod \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\" (UID: \"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9\") " Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.891413 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities" (OuterVolumeSpecName: "utilities") pod "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" (UID: "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.895976 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq" (OuterVolumeSpecName: "kube-api-access-tmjkq") pod "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" (UID: "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9"). InnerVolumeSpecName "kube-api-access-tmjkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.908235 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" (UID: "06af87d7-2cc2-44cd-aa6d-f855a64ce6b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.928860 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.990362 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.990405 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmjkq\" (UniqueName: \"kubernetes.io/projected/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-kube-api-access-tmjkq\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:58 crc kubenswrapper[4879]: I1125 14:28:58.990421 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.090848 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwnxj\" (UniqueName: \"kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj\") pod \"d2104658-98e8-415f-9c56-948de1dc2610\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.090907 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content\") pod \"d2104658-98e8-415f-9c56-948de1dc2610\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.090980 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities\") pod \"d2104658-98e8-415f-9c56-948de1dc2610\" (UID: \"d2104658-98e8-415f-9c56-948de1dc2610\") " Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.091718 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities" (OuterVolumeSpecName: "utilities") pod "d2104658-98e8-415f-9c56-948de1dc2610" (UID: "d2104658-98e8-415f-9c56-948de1dc2610"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.093364 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj" (OuterVolumeSpecName: "kube-api-access-fwnxj") pod "d2104658-98e8-415f-9c56-948de1dc2610" (UID: "d2104658-98e8-415f-9c56-948de1dc2610"). InnerVolumeSpecName "kube-api-access-fwnxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.192194 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.192241 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwnxj\" (UniqueName: \"kubernetes.io/projected/d2104658-98e8-415f-9c56-948de1dc2610-kube-api-access-fwnxj\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.243177 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gc5xf" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.243185 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gc5xf" event={"ID":"06af87d7-2cc2-44cd-aa6d-f855a64ce6b9","Type":"ContainerDied","Data":"8f811a7cf6c3da4c64b5f4826e2570ad2a53ebe05e47bebf4bf19bf1af55b0cd"} Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.243575 4879 scope.go:117] "RemoveContainer" containerID="bf0d8a2d0f25288eac7eb8e4bba35c305787cb46b4caf8d7a31eefeb2c94f554" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.245345 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2104658-98e8-415f-9c56-948de1dc2610" containerID="196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858" exitCode=0 Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.245405 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerDied","Data":"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858"} Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.245423 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5rxmf" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.245439 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5rxmf" event={"ID":"d2104658-98e8-415f-9c56-948de1dc2610","Type":"ContainerDied","Data":"10c51f31b2a8eb7765b8a125b0d13ba3079a431c84914395364a13ce903b682e"} Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.262016 4879 scope.go:117] "RemoveContainer" containerID="3000c14f1c18b63a6d10981064194baf7055787bf866ffebc7c3206ddce6d5fd" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.281183 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.282738 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gc5xf"] Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.292681 4879 scope.go:117] "RemoveContainer" containerID="b3c89bcaf5caccc21595e977d6ed210497b55f5f99f14e22bf4cb59fc106c714" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.304811 4879 scope.go:117] "RemoveContainer" containerID="196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.317856 4879 scope.go:117] "RemoveContainer" containerID="edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.329960 4879 scope.go:117] "RemoveContainer" containerID="108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.344982 4879 scope.go:117] "RemoveContainer" containerID="196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858" Nov 25 14:28:59 crc kubenswrapper[4879]: E1125 14:28:59.345554 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858\": container with ID starting with 196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858 not found: ID does not exist" containerID="196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858" Nov 25 
14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.345601 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858"} err="failed to get container status \"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858\": rpc error: code = NotFound desc = could not find container \"196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858\": container with ID starting with 196922ed693371b829442d429a678a2e7767e4cef74cdc8de68babe19d386858 not found: ID does not exist" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.345631 4879 scope.go:117] "RemoveContainer" containerID="edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93" Nov 25 14:28:59 crc kubenswrapper[4879]: E1125 14:28:59.345996 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93\": container with ID starting with edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93 not found: ID does not exist" containerID="edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.346036 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93"} err="failed to get container status \"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93\": rpc error: code = NotFound desc = could not find container \"edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93\": container with ID starting with edfb613978110a3d22519da8cdcf1ad702fa98739b6179a89ba6b25d8bf0ba93 not found: ID does not exist" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.346063 4879 scope.go:117] "RemoveContainer" containerID="108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d" Nov 25 14:28:59 crc kubenswrapper[4879]: E1125 14:28:59.346322 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d\": container with ID starting with 108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d not found: ID does not exist" containerID="108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.346347 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d"} err="failed to get container status \"108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d\": rpc error: code = NotFound desc = could not find container \"108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d\": container with ID starting with 108f32f0d0975f3273207da6e6c08b11dd4b8322948a7bb704ee9989f463222d not found: ID does not exist" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.598803 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2104658-98e8-415f-9c56-948de1dc2610" (UID: "d2104658-98e8-415f-9c56-948de1dc2610"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.651246 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" path="/var/lib/kubelet/pods/06af87d7-2cc2-44cd-aa6d-f855a64ce6b9/volumes" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.698340 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2104658-98e8-415f-9c56-948de1dc2610-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.865831 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:28:59 crc kubenswrapper[4879]: I1125 14:28:59.869067 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5rxmf"] Nov 25 14:29:01 crc kubenswrapper[4879]: I1125 14:29:01.652741 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2104658-98e8-415f-9c56-948de1dc2610" path="/var/lib/kubelet/pods/d2104658-98e8-415f-9c56-948de1dc2610/volumes" Nov 25 14:29:01 crc kubenswrapper[4879]: I1125 14:29:01.971982 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"] Nov 25 14:29:04 crc kubenswrapper[4879]: I1125 14:29:04.281837 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerStarted","Data":"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9"} Nov 25 14:29:04 crc kubenswrapper[4879]: I1125 14:29:04.304059 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tcmmp" podStartSLOduration=3.611489317 podStartE2EDuration="1m25.304040996s" podCreationTimestamp="2025-11-25 14:27:39 +0000 UTC" firstStartedPulling="2025-11-25 14:27:41.523231887 +0000 UTC m=+153.126644958" lastFinishedPulling="2025-11-25 14:29:03.215783566 +0000 UTC m=+234.819196637" observedRunningTime="2025-11-25 14:29:04.301800312 +0000 UTC m=+235.905213403" watchObservedRunningTime="2025-11-25 14:29:04.304040996 +0000 UTC m=+235.907454067" Nov 25 14:29:09 crc kubenswrapper[4879]: I1125 14:29:09.845844 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:09 crc kubenswrapper[4879]: I1125 14:29:09.846492 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:09 crc kubenswrapper[4879]: I1125 14:29:09.914826 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:10 crc kubenswrapper[4879]: I1125 14:29:10.387445 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:10 crc kubenswrapper[4879]: I1125 14:29:10.430066 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.327205 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tcmmp" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="registry-server" 
containerID="cri-o://403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9" gracePeriod=2 Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.701983 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.853327 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities\") pod \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.853382 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content\") pod \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.853439 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sv7z\" (UniqueName: \"kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z\") pod \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\" (UID: \"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a\") " Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.854530 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities" (OuterVolumeSpecName: "utilities") pod "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" (UID: "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.859288 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z" (OuterVolumeSpecName: "kube-api-access-7sv7z") pod "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" (UID: "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a"). InnerVolumeSpecName "kube-api-access-7sv7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.897438 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" (UID: "2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.954679 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sv7z\" (UniqueName: \"kubernetes.io/projected/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-kube-api-access-7sv7z\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.954710 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:12 crc kubenswrapper[4879]: I1125 14:29:12.954721 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.339268 4879 generic.go:334] "Generic (PLEG): container finished" podID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerID="403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9" exitCode=0 Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.339319 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerDied","Data":"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9"} Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.339349 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcmmp" event={"ID":"2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a","Type":"ContainerDied","Data":"aa54e5c818b2033ba48385e7da518aa5b1ba0de43c1470c9b9d5871b09b08a58"} Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.339371 4879 scope.go:117] "RemoveContainer" containerID="403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.339496 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tcmmp" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.357597 4879 scope.go:117] "RemoveContainer" containerID="57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.371160 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.378641 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tcmmp"] Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.390092 4879 scope.go:117] "RemoveContainer" containerID="9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.405194 4879 scope.go:117] "RemoveContainer" containerID="403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9" Nov 25 14:29:13 crc kubenswrapper[4879]: E1125 14:29:13.405720 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9\": container with ID starting with 403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9 not found: ID does not exist" containerID="403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.405751 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9"} err="failed to get container status \"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9\": rpc error: code = NotFound desc = could not find container \"403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9\": container with ID starting with 403e02e5d50c71f1c635086412e006857a2ba5e25c599d1ddcb11ee0769f97e9 not found: ID does not exist" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.405802 4879 scope.go:117] "RemoveContainer" containerID="57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737" Nov 25 14:29:13 crc kubenswrapper[4879]: E1125 14:29:13.406324 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737\": container with ID starting with 57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737 not found: ID does not exist" containerID="57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.406357 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737"} err="failed to get container status \"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737\": rpc error: code = NotFound desc = could not find container \"57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737\": container with ID starting with 57d1ab6e1df36f0c9f553af310ffa4e8be142be9d81824be81beb2f6b50b2737 not found: ID does not exist" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.406371 4879 scope.go:117] "RemoveContainer" containerID="9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07" Nov 25 14:29:13 crc kubenswrapper[4879]: E1125 14:29:13.406645 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07\": container with ID starting with 9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07 not found: ID does not exist" containerID="9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.406667 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07"} err="failed to get container status \"9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07\": rpc error: code = NotFound desc = could not find container \"9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07\": container with ID starting with 9d2380f3082271e9e32b56cc6faa371c1108f87f45873b1007f5e7bd77198e07 not found: ID does not exist" Nov 25 14:29:13 crc kubenswrapper[4879]: I1125 14:29:13.654133 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" path="/var/lib/kubelet/pods/2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a/volumes" Nov 25 14:29:26 crc kubenswrapper[4879]: I1125 14:29:26.994160 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" containerID="cri-o://51bbb1cdd4373f565674cf95eb6515dd1166dfbe20a47401e6b3333e5479089f" gracePeriod=15 Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.417573 4879 generic.go:334] "Generic (PLEG): container finished" podID="f8acacef-b1cb-4832-b107-964e0325cb17" containerID="51bbb1cdd4373f565674cf95eb6515dd1166dfbe20a47401e6b3333e5479089f" exitCode=0 Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.417630 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" event={"ID":"f8acacef-b1cb-4832-b107-964e0325cb17","Type":"ContainerDied","Data":"51bbb1cdd4373f565674cf95eb6515dd1166dfbe20a47401e6b3333e5479089f"} Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.832991 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.873936 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6bf5fff678-vlxv7"] Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.874506 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.874525 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.874546 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.874554 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.874563 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.874572 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.874589 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.874595 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.874638 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878050 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878108 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878137 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878165 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878175 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878190 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878204 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878234 4879 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878243 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878266 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878272 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="extract-utilities" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878286 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" containerName="pruner" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878292 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" containerName="pruner" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878305 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878311 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="extract-content" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878326 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878337 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: E1125 14:29:27.878351 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878357 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878652 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f11a0c9-fcf2-4b7f-a7e1-59a24a0a520a" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878681 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2104658-98e8-415f-9c56-948de1dc2610" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878693 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="653c7beb-0b48-4f5f-bbc0-6d2e96ff8820" containerName="pruner" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878710 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="06af87d7-2cc2-44cd-aa6d-f855a64ce6b9" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878719 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c959028-cdd8-44df-b9d6-92c193b3aa6d" containerName="registry-server" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.878735 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" containerName="oauth-openshift" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 
14:29:27.879535 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.886845 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6bf5fff678-vlxv7"] Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932592 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932672 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932718 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932760 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932809 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932838 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932863 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnqfz\" (UniqueName: \"kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932898 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932929 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.932964 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933008 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933049 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933093 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933169 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig\") pod \"f8acacef-b1cb-4832-b107-964e0325cb17\" (UID: \"f8acacef-b1cb-4832-b107-964e0325cb17\") " Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933283 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.933595 4879 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f8acacef-b1cb-4832-b107-964e0325cb17-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.934154 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.934237 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.934320 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.934860 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.938100 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.938273 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz" (OuterVolumeSpecName: "kube-api-access-tnqfz") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "kube-api-access-tnqfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.938670 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.939185 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.939991 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.940308 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.940892 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.941205 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:27 crc kubenswrapper[4879]: I1125 14:29:27.941370 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f8acacef-b1cb-4832-b107-964e0325cb17" (UID: "f8acacef-b1cb-4832-b107-964e0325cb17"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.034935 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-service-ca\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036258 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-router-certs\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036339 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbxrt\" (UniqueName: \"kubernetes.io/projected/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-kube-api-access-qbxrt\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036396 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036446 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036470 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036506 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036549 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036647 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-login\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036701 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-dir\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036765 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036797 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-session\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036835 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-policies\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036862 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-error\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036960 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036977 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" 
Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.036989 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037001 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037019 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037030 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037042 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037054 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037067 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037079 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037091 4879 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f8acacef-b1cb-4832-b107-964e0325cb17-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037103 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnqfz\" (UniqueName: \"kubernetes.io/projected/f8acacef-b1cb-4832-b107-964e0325cb17-kube-api-access-tnqfz\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.037114 4879 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f8acacef-b1cb-4832-b107-964e0325cb17-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.137990 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138039 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-session\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138070 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-policies\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138090 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-error\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138108 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-service-ca\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138142 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-router-certs\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138158 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbxrt\" (UniqueName: \"kubernetes.io/projected/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-kube-api-access-qbxrt\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138185 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138213 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138240 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138266 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138294 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138331 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-login\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138360 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-dir\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138436 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-dir\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138750 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.138783 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-audit-policies\") pod 
\"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.139725 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-service-ca\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.140287 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.141612 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-router-certs\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.141647 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-session\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.141751 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.142115 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-error\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.143006 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.143107 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-login\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: 
\"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.144169 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.144547 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.155239 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbxrt\" (UniqueName: \"kubernetes.io/projected/83bbcedf-6702-4a28-9e3b-593e4c90a7a8-kube-api-access-qbxrt\") pod \"oauth-openshift-6bf5fff678-vlxv7\" (UID: \"83bbcedf-6702-4a28-9e3b-593e4c90a7a8\") " pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.207116 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.423578 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" event={"ID":"f8acacef-b1cb-4832-b107-964e0325cb17","Type":"ContainerDied","Data":"ef45d64cdc33151ea54ef12fbea3a4567c2d6c7fe6aba39e752481faf7c819fa"} Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.423628 4879 scope.go:117] "RemoveContainer" containerID="51bbb1cdd4373f565674cf95eb6515dd1166dfbe20a47401e6b3333e5479089f" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.423627 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-v9t69" Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.451692 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"] Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.454044 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-v9t69"] Nov 25 14:29:28 crc kubenswrapper[4879]: I1125 14:29:28.607113 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6bf5fff678-vlxv7"] Nov 25 14:29:28 crc kubenswrapper[4879]: W1125 14:29:28.614709 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83bbcedf_6702_4a28_9e3b_593e4c90a7a8.slice/crio-5ee9af96b7d490a9bb2ebd3c62eff28fa2d67649d7700d07121e065b0f26b518 WatchSource:0}: Error finding container 5ee9af96b7d490a9bb2ebd3c62eff28fa2d67649d7700d07121e065b0f26b518: Status 404 returned error can't find the container with id 5ee9af96b7d490a9bb2ebd3c62eff28fa2d67649d7700d07121e065b0f26b518 Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.431186 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" event={"ID":"83bbcedf-6702-4a28-9e3b-593e4c90a7a8","Type":"ContainerStarted","Data":"e0c410108586d38eab2d90fc5a2d905cc5eca2b2587ada56b761dadc353a0844"} Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.431253 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" event={"ID":"83bbcedf-6702-4a28-9e3b-593e4c90a7a8","Type":"ContainerStarted","Data":"5ee9af96b7d490a9bb2ebd3c62eff28fa2d67649d7700d07121e065b0f26b518"} Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.431286 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.436949 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.479036 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6bf5fff678-vlxv7" podStartSLOduration=27.479021506 podStartE2EDuration="27.479021506s" podCreationTimestamp="2025-11-25 14:29:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:29:29.456182042 +0000 UTC m=+261.059595133" watchObservedRunningTime="2025-11-25 14:29:29.479021506 +0000 UTC m=+261.082434577" Nov 25 14:29:29 crc kubenswrapper[4879]: I1125 14:29:29.654237 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8acacef-b1cb-4832-b107-964e0325cb17" path="/var/lib/kubelet/pods/f8acacef-b1cb-4832-b107-964e0325cb17/volumes" Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.952643 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"] Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.954192 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8s9x8" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="registry-server" 
containerID="cri-o://11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9" gracePeriod=30 Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.964097 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.964568 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ts4q6" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="registry-server" containerID="cri-o://d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" gracePeriod=30 Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.973386 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"] Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.973592 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" containerID="cri-o://14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a" gracePeriod=30 Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.984545 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.984860 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q79zr" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="registry-server" containerID="cri-o://9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61" gracePeriod=30 Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.992665 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:29:48 crc kubenswrapper[4879]: I1125 14:29:48.993082 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cff9c" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="registry-server" containerID="cri-o://b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01" gracePeriod=30 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.006049 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8nth"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.012247 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.017321 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8nth"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.099263 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.099330 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.099374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6djx\" (UniqueName: \"kubernetes.io/projected/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-kube-api-access-l6djx\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.199745 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.199794 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6djx\" (UniqueName: \"kubernetes.io/projected/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-kube-api-access-l6djx\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.199868 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.201904 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.206850 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.221149 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd is running failed: container process not found" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.221646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6djx\" (UniqueName: \"kubernetes.io/projected/ac0dfdc7-d570-411d-87c6-c3f58bdf04ad-kube-api-access-l6djx\") pod \"marketplace-operator-79b997595-s8nth\" (UID: \"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.221686 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd is running failed: container process not found" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.225456 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd is running failed: container process not found" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.225506 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-ts4q6" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="registry-server" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.358153 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.361811 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.401632 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content\") pod \"49d11c77-256a-4941-af43-5ca7285521d5\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.401668 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities\") pod \"49d11c77-256a-4941-af43-5ca7285521d5\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.401739 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2zql\" (UniqueName: \"kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql\") pod \"49d11c77-256a-4941-af43-5ca7285521d5\" (UID: \"49d11c77-256a-4941-af43-5ca7285521d5\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.402837 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities" (OuterVolumeSpecName: "utilities") pod "49d11c77-256a-4941-af43-5ca7285521d5" (UID: "49d11c77-256a-4941-af43-5ca7285521d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.404939 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql" (OuterVolumeSpecName: "kube-api-access-x2zql") pod "49d11c77-256a-4941-af43-5ca7285521d5" (UID: "49d11c77-256a-4941-af43-5ca7285521d5"). InnerVolumeSpecName "kube-api-access-x2zql". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.434655 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.439084 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.459531 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.460592 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "49d11c77-256a-4941-af43-5ca7285521d5" (UID: "49d11c77-256a-4941-af43-5ca7285521d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.468446 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502785 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content\") pod \"da124515-51aa-4521-8a22-f5239f2afaf3\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502856 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w64gv\" (UniqueName: \"kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv\") pod \"b2bed98f-606e-49cb-88fc-76a5bd20af09\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502880 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities\") pod \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502916 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca\") pod \"b2bed98f-606e-49cb-88fc-76a5bd20af09\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502940 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqqld\" (UniqueName: \"kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld\") pod \"da124515-51aa-4521-8a22-f5239f2afaf3\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502962 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities\") pod \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.502984 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content\") pod \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503021 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qht2c\" (UniqueName: \"kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c\") pod \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503037 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwzdg\" (UniqueName: \"kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg\") pod \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\" (UID: \"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503070 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics\") pod \"b2bed98f-606e-49cb-88fc-76a5bd20af09\" (UID: \"b2bed98f-606e-49cb-88fc-76a5bd20af09\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503096 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities\") pod \"da124515-51aa-4521-8a22-f5239f2afaf3\" (UID: \"da124515-51aa-4521-8a22-f5239f2afaf3\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503116 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content\") pod \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\" (UID: \"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa\") " Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503520 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503535 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49d11c77-256a-4941-af43-5ca7285521d5-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.503546 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2zql\" (UniqueName: \"kubernetes.io/projected/49d11c77-256a-4941-af43-5ca7285521d5-kube-api-access-x2zql\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.504683 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities" (OuterVolumeSpecName: "utilities") pod "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" (UID: "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.505243 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities" (OuterVolumeSpecName: "utilities") pod "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" (UID: "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.505481 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b2bed98f-606e-49cb-88fc-76a5bd20af09" (UID: "b2bed98f-606e-49cb-88fc-76a5bd20af09"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.508086 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities" (OuterVolumeSpecName: "utilities") pod "da124515-51aa-4521-8a22-f5239f2afaf3" (UID: "da124515-51aa-4521-8a22-f5239f2afaf3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.508191 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld" (OuterVolumeSpecName: "kube-api-access-gqqld") pod "da124515-51aa-4521-8a22-f5239f2afaf3" (UID: "da124515-51aa-4521-8a22-f5239f2afaf3"). InnerVolumeSpecName "kube-api-access-gqqld". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.508391 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c" (OuterVolumeSpecName: "kube-api-access-qht2c") pod "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" (UID: "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa"). InnerVolumeSpecName "kube-api-access-qht2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.509006 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv" (OuterVolumeSpecName: "kube-api-access-w64gv") pod "b2bed98f-606e-49cb-88fc-76a5bd20af09" (UID: "b2bed98f-606e-49cb-88fc-76a5bd20af09"). InnerVolumeSpecName "kube-api-access-w64gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.511027 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b2bed98f-606e-49cb-88fc-76a5bd20af09" (UID: "b2bed98f-606e-49cb-88fc-76a5bd20af09"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.523245 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" (UID: "d022699c-c1c7-40e3-8a77-bd9dbd66b1aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.523935 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg" (OuterVolumeSpecName: "kube-api-access-pwzdg") pod "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" (UID: "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e"). InnerVolumeSpecName "kube-api-access-pwzdg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.546970 4879 generic.go:334] "Generic (PLEG): container finished" podID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerID="9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61" exitCode=0 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.547024 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerDied","Data":"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.547046 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q79zr" event={"ID":"d022699c-c1c7-40e3-8a77-bd9dbd66b1aa","Type":"ContainerDied","Data":"39c21192c67bb9cadd961581d871dd681874391069884e124d67bc9eb47ffc93"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.547063 4879 scope.go:117] "RemoveContainer" containerID="9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.547350 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q79zr" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.551092 4879 generic.go:334] "Generic (PLEG): container finished" podID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerID="14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a" exitCode=0 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.551228 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" event={"ID":"b2bed98f-606e-49cb-88fc-76a5bd20af09","Type":"ContainerDied","Data":"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.551249 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" event={"ID":"b2bed98f-606e-49cb-88fc-76a5bd20af09","Type":"ContainerDied","Data":"939985f53cde913ba06dbf3f2fee3d56d8444b892843d8d6d3ba208261d40f8e"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.551288 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mlcb5" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.556341 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cff9c" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.556368 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerDied","Data":"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.556940 4879 generic.go:334] "Generic (PLEG): container finished" podID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerID="b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01" exitCode=0 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.556992 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cff9c" event={"ID":"c80e42d3-60e0-43cd-a2d5-455e7a61cd1e","Type":"ContainerDied","Data":"14239db5b1bce35330727ec93b43186974c753983449d1ce39c4e28144684a80"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.559862 4879 generic.go:334] "Generic (PLEG): container finished" podID="49d11c77-256a-4941-af43-5ca7285521d5" containerID="11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9" exitCode=0 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.559885 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8s9x8" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.559922 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerDied","Data":"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.559942 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8s9x8" event={"ID":"49d11c77-256a-4941-af43-5ca7285521d5","Type":"ContainerDied","Data":"57c7e44faed79cde1356637d1f4cbd7fc0f4f052e89f23eddcc0184f2f466216"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.568557 4879 generic.go:334] "Generic (PLEG): container finished" podID="da124515-51aa-4521-8a22-f5239f2afaf3" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" exitCode=0 Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.568595 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerDied","Data":"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.568620 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ts4q6" event={"ID":"da124515-51aa-4521-8a22-f5239f2afaf3","Type":"ContainerDied","Data":"401d2fd6c7411621c4bb8a5b1bc5158e70a392316220415c9853e14213844249"} Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.568680 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ts4q6" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.577542 4879 scope.go:117] "RemoveContainer" containerID="ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.589048 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.594832 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mlcb5"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.603340 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604643 4879 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604679 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqqld\" (UniqueName: \"kubernetes.io/projected/da124515-51aa-4521-8a22-f5239f2afaf3-kube-api-access-gqqld\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604690 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604702 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qht2c\" (UniqueName: \"kubernetes.io/projected/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-kube-api-access-qht2c\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604711 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwzdg\" (UniqueName: \"kubernetes.io/projected/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-kube-api-access-pwzdg\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604720 4879 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b2bed98f-606e-49cb-88fc-76a5bd20af09-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604728 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604736 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604758 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w64gv\" (UniqueName: \"kubernetes.io/projected/b2bed98f-606e-49cb-88fc-76a5bd20af09-kube-api-access-w64gv\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.604766 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 
14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.607927 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q79zr"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.609283 4879 scope.go:117] "RemoveContainer" containerID="5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.613776 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.617797 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8s9x8"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.618711 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da124515-51aa-4521-8a22-f5239f2afaf3" (UID: "da124515-51aa-4521-8a22-f5239f2afaf3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.633959 4879 scope.go:117] "RemoveContainer" containerID="9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.634538 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61\": container with ID starting with 9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61 not found: ID does not exist" containerID="9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.634599 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61"} err="failed to get container status \"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61\": rpc error: code = NotFound desc = could not find container \"9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61\": container with ID starting with 9bf948aa38d74327fbf18be3bb1260fb0cd8cdd0ae1a09ce080fbb02f4195f61 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.634627 4879 scope.go:117] "RemoveContainer" containerID="ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.635366 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a\": container with ID starting with ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a not found: ID does not exist" containerID="ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.635417 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a"} err="failed to get container status \"ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a\": rpc error: code = NotFound desc = could not find container \"ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a\": container with ID starting with 
ed96cf37c744aa2159ab10ba03d20b989489cb9918e44f984bff5f4e517eb00a not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.635431 4879 scope.go:117] "RemoveContainer" containerID="5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.635708 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8\": container with ID starting with 5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8 not found: ID does not exist" containerID="5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.635759 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8"} err="failed to get container status \"5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8\": rpc error: code = NotFound desc = could not find container \"5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8\": container with ID starting with 5d07f58ebf78c58a7b9182e30211a4e8aaed3c0f145a08bad7e74cd0350074e8 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.635793 4879 scope.go:117] "RemoveContainer" containerID="14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.651056 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49d11c77-256a-4941-af43-5ca7285521d5" path="/var/lib/kubelet/pods/49d11c77-256a-4941-af43-5ca7285521d5/volumes" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.651672 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" path="/var/lib/kubelet/pods/b2bed98f-606e-49cb-88fc-76a5bd20af09/volumes" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.651910 4879 scope.go:117] "RemoveContainer" containerID="14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.652105 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" path="/var/lib/kubelet/pods/d022699c-c1c7-40e3-8a77-bd9dbd66b1aa/volumes" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.654585 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a\": container with ID starting with 14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a not found: ID does not exist" containerID="14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.654627 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a"} err="failed to get container status \"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a\": rpc error: code = NotFound desc = could not find container \"14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a\": container with ID starting with 14136373f31bc1709f2d87ec89628fef7f3d2117a573201d54531b3414c25e4a not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 
14:29:49.654662 4879 scope.go:117] "RemoveContainer" containerID="b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.661270 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" (UID: "c80e42d3-60e0-43cd-a2d5-455e7a61cd1e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.670884 4879 scope.go:117] "RemoveContainer" containerID="3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.698900 4879 scope.go:117] "RemoveContainer" containerID="6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.706245 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da124515-51aa-4521-8a22-f5239f2afaf3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.706279 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.715817 4879 scope.go:117] "RemoveContainer" containerID="b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.716295 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01\": container with ID starting with b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01 not found: ID does not exist" containerID="b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.716353 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01"} err="failed to get container status \"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01\": rpc error: code = NotFound desc = could not find container \"b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01\": container with ID starting with b2d731ca2c406797a58e6a1eee842c0ddeeedb9b176b790087fc3e2618f9fd01 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.716397 4879 scope.go:117] "RemoveContainer" containerID="3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.716825 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b\": container with ID starting with 3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b not found: ID does not exist" containerID="3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.716866 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b"} err="failed to get container status \"3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b\": rpc error: code = NotFound desc = could not find container \"3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b\": container with ID starting with 3c04d308c6600b711129856172c3cd69da1ea9cf17d87107ba22faa6ea77e36b not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.716884 4879 scope.go:117] "RemoveContainer" containerID="6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.717291 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03\": container with ID starting with 6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03 not found: ID does not exist" containerID="6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.717319 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03"} err="failed to get container status \"6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03\": rpc error: code = NotFound desc = could not find container \"6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03\": container with ID starting with 6bc1613a1f65d295f1d8a70049f2fd547c151a652f6e18e1207a3383c5424a03 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.717338 4879 scope.go:117] "RemoveContainer" containerID="11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.733862 4879 scope.go:117] "RemoveContainer" containerID="34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.751209 4879 scope.go:117] "RemoveContainer" containerID="bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.767894 4879 scope.go:117] "RemoveContainer" containerID="11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.768319 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9\": container with ID starting with 11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9 not found: ID does not exist" containerID="11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.768365 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9"} err="failed to get container status \"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9\": rpc error: code = NotFound desc = could not find container \"11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9\": container with ID starting with 11af630baaceb1d3f5035c45e4ae2604118336ed5e9d84de84500f253bfacfa9 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.768394 4879 
scope.go:117] "RemoveContainer" containerID="34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.768980 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b\": container with ID starting with 34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b not found: ID does not exist" containerID="34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.769005 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b"} err="failed to get container status \"34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b\": rpc error: code = NotFound desc = could not find container \"34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b\": container with ID starting with 34874d99bbb79ac499eef5638094227d3155c1e4698658b33d3cb51d63135e8b not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.769028 4879 scope.go:117] "RemoveContainer" containerID="bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.769619 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720\": container with ID starting with bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720 not found: ID does not exist" containerID="bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.769644 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720"} err="failed to get container status \"bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720\": rpc error: code = NotFound desc = could not find container \"bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720\": container with ID starting with bbac7e7ce0791fdb18d782139a176b0840b22c5d23e21209abfc9a2bc6cc0720 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.769657 4879 scope.go:117] "RemoveContainer" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.785395 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s8nth"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.786453 4879 scope.go:117] "RemoveContainer" containerID="47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.804447 4879 scope.go:117] "RemoveContainer" containerID="0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.827633 4879 scope.go:117] "RemoveContainer" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.828063 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd\": container with ID starting with d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd not found: ID does not exist" containerID="d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.828098 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd"} err="failed to get container status \"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd\": rpc error: code = NotFound desc = could not find container \"d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd\": container with ID starting with d4fb256d8474b7a781794e10e9c50f18b09b6548b497b9e9da81867b08bf84cd not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.828142 4879 scope.go:117] "RemoveContainer" containerID="47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.828711 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98\": container with ID starting with 47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98 not found: ID does not exist" containerID="47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.828838 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98"} err="failed to get container status \"47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98\": rpc error: code = NotFound desc = could not find container \"47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98\": container with ID starting with 47d5e7ef6882a9cac4e37e30d604412c50683dd29d0a6a121814498f89ba7b98 not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.828895 4879 scope.go:117] "RemoveContainer" containerID="0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa" Nov 25 14:29:49 crc kubenswrapper[4879]: E1125 14:29:49.829316 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa\": container with ID starting with 0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa not found: ID does not exist" containerID="0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.829359 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa"} err="failed to get container status \"0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa\": rpc error: code = NotFound desc = could not find container \"0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa\": container with ID starting with 0af8faa5461572d1f502abddff2f5b691868030fdca4ea2c2e96692423ea97fa not found: ID does not exist" Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.884019 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:29:49 crc kubenswrapper[4879]: 
I1125 14:29:49.888307 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cff9c"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.897443 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:29:49 crc kubenswrapper[4879]: I1125 14:29:49.901386 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ts4q6"] Nov 25 14:29:50 crc kubenswrapper[4879]: I1125 14:29:50.580495 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" event={"ID":"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad","Type":"ContainerStarted","Data":"21fbf2c08f619fb0b93cf088881ecfe46677554132a7c9b6e733ee46796cbaad"} Nov 25 14:29:50 crc kubenswrapper[4879]: I1125 14:29:50.580739 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" event={"ID":"ac0dfdc7-d570-411d-87c6-c3f58bdf04ad","Type":"ContainerStarted","Data":"fc2f29e62c26a178543020f888c95a3d07743108e4da417d09607aacd3a35141"} Nov 25 14:29:50 crc kubenswrapper[4879]: I1125 14:29:50.581067 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:50 crc kubenswrapper[4879]: I1125 14:29:50.586719 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" Nov 25 14:29:50 crc kubenswrapper[4879]: I1125 14:29:50.599522 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-s8nth" podStartSLOduration=2.599506379 podStartE2EDuration="2.599506379s" podCreationTimestamp="2025-11-25 14:29:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:29:50.597304558 +0000 UTC m=+282.200717649" watchObservedRunningTime="2025-11-25 14:29:50.599506379 +0000 UTC m=+282.202919450" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166252 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ndg5n"] Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166445 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166457 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166467 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166473 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166481 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166487 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 
14:29:51.166497 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166503 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166510 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166516 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166524 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166529 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166537 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166542 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166552 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166558 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166567 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166573 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166580 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166586 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="extract-utilities" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166593 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166598 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166605 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166613 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="extract-content" Nov 25 
14:29:51 crc kubenswrapper[4879]: E1125 14:29:51.166622 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166627 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="extract-content" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166701 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166713 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d022699c-c1c7-40e3-8a77-bd9dbd66b1aa" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166720 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="49d11c77-256a-4941-af43-5ca7285521d5" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166726 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" containerName="registry-server" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.166735 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2bed98f-606e-49cb-88fc-76a5bd20af09" containerName="marketplace-operator" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.168669 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.170684 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.176334 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndg5n"] Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.326103 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-catalog-content\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.326198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-utilities\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.326246 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7hgm\" (UniqueName: \"kubernetes.io/projected/77e0321b-48ed-499e-ac29-e06a8cb770ec-kube-api-access-p7hgm\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.366415 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.367865 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.370496 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.378186 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.427770 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-catalog-content\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.427820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-utilities\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.427856 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7hgm\" (UniqueName: \"kubernetes.io/projected/77e0321b-48ed-499e-ac29-e06a8cb770ec-kube-api-access-p7hgm\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.428287 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-utilities\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.428533 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77e0321b-48ed-499e-ac29-e06a8cb770ec-catalog-content\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.448442 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7hgm\" (UniqueName: \"kubernetes.io/projected/77e0321b-48ed-499e-ac29-e06a8cb770ec-kube-api-access-p7hgm\") pod \"redhat-marketplace-ndg5n\" (UID: \"77e0321b-48ed-499e-ac29-e06a8cb770ec\") " pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.493395 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.531600 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.531850 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stdv8\" (UniqueName: \"kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.532029 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.632969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.633006 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stdv8\" (UniqueName: \"kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.633064 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.633594 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.633621 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.653515 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c80e42d3-60e0-43cd-a2d5-455e7a61cd1e" path="/var/lib/kubelet/pods/c80e42d3-60e0-43cd-a2d5-455e7a61cd1e/volumes" Nov 25 14:29:51 crc 
kubenswrapper[4879]: I1125 14:29:51.654115 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da124515-51aa-4521-8a22-f5239f2afaf3" path="/var/lib/kubelet/pods/da124515-51aa-4521-8a22-f5239f2afaf3/volumes" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.655269 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stdv8\" (UniqueName: \"kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8\") pod \"community-operators-d6t8h\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.670956 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ndg5n"] Nov 25 14:29:51 crc kubenswrapper[4879]: W1125 14:29:51.676331 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77e0321b_48ed_499e_ac29_e06a8cb770ec.slice/crio-9ed6216d2fa7150415d86ddb919bee7e4ac654c355e4975defc8a1ae667564ba WatchSource:0}: Error finding container 9ed6216d2fa7150415d86ddb919bee7e4ac654c355e4975defc8a1ae667564ba: Status 404 returned error can't find the container with id 9ed6216d2fa7150415d86ddb919bee7e4ac654c355e4975defc8a1ae667564ba Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.691570 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:29:51 crc kubenswrapper[4879]: I1125 14:29:51.876031 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:29:51 crc kubenswrapper[4879]: W1125 14:29:51.912672 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89fc34a9_051c_46cb_b745_879d6c018542.slice/crio-3a1f5ab4f07534f0c731d012b8852aa0a1a2ad5eb589121b9ded4e5635d2e8d4 WatchSource:0}: Error finding container 3a1f5ab4f07534f0c731d012b8852aa0a1a2ad5eb589121b9ded4e5635d2e8d4: Status 404 returned error can't find the container with id 3a1f5ab4f07534f0c731d012b8852aa0a1a2ad5eb589121b9ded4e5635d2e8d4 Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.590404 4879 generic.go:334] "Generic (PLEG): container finished" podID="89fc34a9-051c-46cb-b745-879d6c018542" containerID="47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2" exitCode=0 Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.590463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerDied","Data":"47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2"} Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.591362 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerStarted","Data":"3a1f5ab4f07534f0c731d012b8852aa0a1a2ad5eb589121b9ded4e5635d2e8d4"} Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.593183 4879 generic.go:334] "Generic (PLEG): container finished" podID="77e0321b-48ed-499e-ac29-e06a8cb770ec" containerID="a4d06f153d7c157a946a6496f5c8c31620d49c38fbb614416ac7a1c34bc6bc2b" exitCode=0 Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.593232 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-ndg5n" event={"ID":"77e0321b-48ed-499e-ac29-e06a8cb770ec","Type":"ContainerDied","Data":"a4d06f153d7c157a946a6496f5c8c31620d49c38fbb614416ac7a1c34bc6bc2b"} Nov 25 14:29:52 crc kubenswrapper[4879]: I1125 14:29:52.593284 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndg5n" event={"ID":"77e0321b-48ed-499e-ac29-e06a8cb770ec","Type":"ContainerStarted","Data":"9ed6216d2fa7150415d86ddb919bee7e4ac654c355e4975defc8a1ae667564ba"} Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.570991 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cnspb"] Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.572069 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.574026 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.581078 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cnspb"] Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.658287 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-utilities\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.658348 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rctzg\" (UniqueName: \"kubernetes.io/projected/edfd5cef-7064-4111-9482-fd3714d4ee32-kube-api-access-rctzg\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.658432 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-catalog-content\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.760890 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-catalog-content\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.760954 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-utilities\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.761011 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rctzg\" (UniqueName: \"kubernetes.io/projected/edfd5cef-7064-4111-9482-fd3714d4ee32-kube-api-access-rctzg\") 
pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.761630 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-catalog-content\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.761821 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfd5cef-7064-4111-9482-fd3714d4ee32-utilities\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.769978 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.771336 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.773908 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.782461 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rctzg\" (UniqueName: \"kubernetes.io/projected/edfd5cef-7064-4111-9482-fd3714d4ee32-kube-api-access-rctzg\") pod \"certified-operators-cnspb\" (UID: \"edfd5cef-7064-4111-9482-fd3714d4ee32\") " pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.784922 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.861795 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.861855 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.861886 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj8pp\" (UniqueName: \"kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.927656 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.963678 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.964335 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.964580 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj8pp\" (UniqueName: \"kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.964654 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.964354 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:53 crc kubenswrapper[4879]: I1125 14:29:53.989770 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj8pp\" (UniqueName: \"kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp\") pod \"redhat-operators-fdl65\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.110981 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cnspb"] Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.130061 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:29:54 crc kubenswrapper[4879]: W1125 14:29:54.134952 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedfd5cef_7064_4111_9482_fd3714d4ee32.slice/crio-16faf73a931d2eab478fcfd64f84bc14520c10360bde90259b289b1f00547f1d WatchSource:0}: Error finding container 16faf73a931d2eab478fcfd64f84bc14520c10360bde90259b289b1f00547f1d: Status 404 returned error can't find the container with id 16faf73a931d2eab478fcfd64f84bc14520c10360bde90259b289b1f00547f1d Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.296921 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 14:29:54 crc kubenswrapper[4879]: W1125 14:29:54.356084 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5e87bea_0848_477e_b207_0aaac4e1e63a.slice/crio-eb9798244c4fe54c1379f3e2f7768a4ed62db2bf8e25963c812e4dacfa83e20d WatchSource:0}: Error finding container eb9798244c4fe54c1379f3e2f7768a4ed62db2bf8e25963c812e4dacfa83e20d: Status 404 returned error can't find the container with id eb9798244c4fe54c1379f3e2f7768a4ed62db2bf8e25963c812e4dacfa83e20d Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.613799 4879 generic.go:334] "Generic (PLEG): container finished" podID="77e0321b-48ed-499e-ac29-e06a8cb770ec" containerID="cac03e0d265e8b1f17c0a027fff35366a306693811598434d1db705c671dc49c" exitCode=0 Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.613867 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndg5n" event={"ID":"77e0321b-48ed-499e-ac29-e06a8cb770ec","Type":"ContainerDied","Data":"cac03e0d265e8b1f17c0a027fff35366a306693811598434d1db705c671dc49c"} Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.615307 4879 generic.go:334] "Generic (PLEG): container finished" podID="edfd5cef-7064-4111-9482-fd3714d4ee32" containerID="171a3230d6bc6ad4c05030008ac5a4be2945c8bc91ee90406806fe7818c8be76" exitCode=0 Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.615395 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cnspb" event={"ID":"edfd5cef-7064-4111-9482-fd3714d4ee32","Type":"ContainerDied","Data":"171a3230d6bc6ad4c05030008ac5a4be2945c8bc91ee90406806fe7818c8be76"} Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.615418 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cnspb" event={"ID":"edfd5cef-7064-4111-9482-fd3714d4ee32","Type":"ContainerStarted","Data":"16faf73a931d2eab478fcfd64f84bc14520c10360bde90259b289b1f00547f1d"} Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.620472 4879 generic.go:334] "Generic (PLEG): container finished" podID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerID="f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc" exitCode=0 Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.620689 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerDied","Data":"f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc"} Nov 25 14:29:54 crc kubenswrapper[4879]: I1125 14:29:54.620760 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" 
event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerStarted","Data":"eb9798244c4fe54c1379f3e2f7768a4ed62db2bf8e25963c812e4dacfa83e20d"} Nov 25 14:29:55 crc kubenswrapper[4879]: I1125 14:29:55.634905 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerStarted","Data":"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074"} Nov 25 14:29:55 crc kubenswrapper[4879]: I1125 14:29:55.637404 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cnspb" event={"ID":"edfd5cef-7064-4111-9482-fd3714d4ee32","Type":"ContainerStarted","Data":"5ff63547e312d7bf83c15e88bfe2c949ad1a9b25e7b2bd5ad60b46e5c5bb21d5"} Nov 25 14:29:55 crc kubenswrapper[4879]: I1125 14:29:55.641061 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ndg5n" event={"ID":"77e0321b-48ed-499e-ac29-e06a8cb770ec","Type":"ContainerStarted","Data":"7411fe8bd880c75b0a9193857dd3729f676dfd7b9cbcfb942a80721769c80e0e"} Nov 25 14:29:55 crc kubenswrapper[4879]: I1125 14:29:55.705605 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ndg5n" podStartSLOduration=2.281815972 podStartE2EDuration="4.705578269s" podCreationTimestamp="2025-11-25 14:29:51 +0000 UTC" firstStartedPulling="2025-11-25 14:29:52.595653407 +0000 UTC m=+284.199066478" lastFinishedPulling="2025-11-25 14:29:55.019415664 +0000 UTC m=+286.622828775" observedRunningTime="2025-11-25 14:29:55.697787576 +0000 UTC m=+287.301200647" watchObservedRunningTime="2025-11-25 14:29:55.705578269 +0000 UTC m=+287.308991340" Nov 25 14:29:56 crc kubenswrapper[4879]: I1125 14:29:56.647480 4879 generic.go:334] "Generic (PLEG): container finished" podID="edfd5cef-7064-4111-9482-fd3714d4ee32" containerID="5ff63547e312d7bf83c15e88bfe2c949ad1a9b25e7b2bd5ad60b46e5c5bb21d5" exitCode=0 Nov 25 14:29:56 crc kubenswrapper[4879]: I1125 14:29:56.647523 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cnspb" event={"ID":"edfd5cef-7064-4111-9482-fd3714d4ee32","Type":"ContainerDied","Data":"5ff63547e312d7bf83c15e88bfe2c949ad1a9b25e7b2bd5ad60b46e5c5bb21d5"} Nov 25 14:29:56 crc kubenswrapper[4879]: I1125 14:29:56.650179 4879 generic.go:334] "Generic (PLEG): container finished" podID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerID="fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074" exitCode=0 Nov 25 14:29:56 crc kubenswrapper[4879]: I1125 14:29:56.650250 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerDied","Data":"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074"} Nov 25 14:29:57 crc kubenswrapper[4879]: I1125 14:29:57.656534 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cnspb" event={"ID":"edfd5cef-7064-4111-9482-fd3714d4ee32","Type":"ContainerStarted","Data":"63a2da1687fffdfc6efe08900e9d7bf18ae9a7b2be34f7e329be6b426bbc7926"} Nov 25 14:29:57 crc kubenswrapper[4879]: I1125 14:29:57.659355 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerStarted","Data":"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0"} Nov 25 
14:29:57 crc kubenswrapper[4879]: I1125 14:29:57.675936 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cnspb" podStartSLOduration=2.247398027 podStartE2EDuration="4.675918593s" podCreationTimestamp="2025-11-25 14:29:53 +0000 UTC" firstStartedPulling="2025-11-25 14:29:54.616633866 +0000 UTC m=+286.220046937" lastFinishedPulling="2025-11-25 14:29:57.045154432 +0000 UTC m=+288.648567503" observedRunningTime="2025-11-25 14:29:57.672669314 +0000 UTC m=+289.276082385" watchObservedRunningTime="2025-11-25 14:29:57.675918593 +0000 UTC m=+289.279331664" Nov 25 14:29:57 crc kubenswrapper[4879]: I1125 14:29:57.689271 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fdl65" podStartSLOduration=2.203904726 podStartE2EDuration="4.689257057s" podCreationTimestamp="2025-11-25 14:29:53 +0000 UTC" firstStartedPulling="2025-11-25 14:29:54.622542017 +0000 UTC m=+286.225955088" lastFinishedPulling="2025-11-25 14:29:57.107894348 +0000 UTC m=+288.711307419" observedRunningTime="2025-11-25 14:29:57.687096999 +0000 UTC m=+289.290510070" watchObservedRunningTime="2025-11-25 14:29:57.689257057 +0000 UTC m=+289.292670128" Nov 25 14:29:58 crc kubenswrapper[4879]: I1125 14:29:58.669513 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerStarted","Data":"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507"} Nov 25 14:29:59 crc kubenswrapper[4879]: I1125 14:29:59.675781 4879 generic.go:334] "Generic (PLEG): container finished" podID="89fc34a9-051c-46cb-b745-879d6c018542" containerID="e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507" exitCode=0 Nov 25 14:29:59 crc kubenswrapper[4879]: I1125 14:29:59.675842 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerDied","Data":"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507"} Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.146285 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6"] Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.148404 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.150057 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.151006 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.155485 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6"] Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.243974 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.244326 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mslhz\" (UniqueName: \"kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.244459 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.346498 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.346552 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mslhz\" (UniqueName: \"kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.346591 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.347490 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume\") pod 
\"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.351731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.365382 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mslhz\" (UniqueName: \"kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz\") pod \"collect-profiles-29401350-mn6j6\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.527865 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.691182 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerStarted","Data":"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb"} Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.713529 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d6t8h" podStartSLOduration=2.195044361 podStartE2EDuration="9.713454856s" podCreationTimestamp="2025-11-25 14:29:51 +0000 UTC" firstStartedPulling="2025-11-25 14:29:52.592544173 +0000 UTC m=+284.195957244" lastFinishedPulling="2025-11-25 14:30:00.110954678 +0000 UTC m=+291.714367739" observedRunningTime="2025-11-25 14:30:00.713174738 +0000 UTC m=+292.316587819" watchObservedRunningTime="2025-11-25 14:30:00.713454856 +0000 UTC m=+292.316867927" Nov 25 14:30:00 crc kubenswrapper[4879]: I1125 14:30:00.724758 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6"] Nov 25 14:30:00 crc kubenswrapper[4879]: W1125 14:30:00.730974 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36609869_72a6_4813_bc8b_ea8c53cedf05.slice/crio-766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5 WatchSource:0}: Error finding container 766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5: Status 404 returned error can't find the container with id 766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5 Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.493583 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.493903 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.536435 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.691678 4879 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.691723 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.697288 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" event={"ID":"36609869-72a6-4813-bc8b-ea8c53cedf05","Type":"ContainerStarted","Data":"766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5"} Nov 25 14:30:01 crc kubenswrapper[4879]: I1125 14:30:01.742529 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ndg5n" Nov 25 14:30:02 crc kubenswrapper[4879]: I1125 14:30:02.704219 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" event={"ID":"36609869-72a6-4813-bc8b-ea8c53cedf05","Type":"ContainerStarted","Data":"bfd36f9216857cd722ada09d2056012fe10804d8a8ffa1764defc018b6a0a8dc"} Nov 25 14:30:02 crc kubenswrapper[4879]: I1125 14:30:02.726338 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" podStartSLOduration=2.726322642 podStartE2EDuration="2.726322642s" podCreationTimestamp="2025-11-25 14:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:30:02.724575954 +0000 UTC m=+294.327989035" watchObservedRunningTime="2025-11-25 14:30:02.726322642 +0000 UTC m=+294.329735713" Nov 25 14:30:02 crc kubenswrapper[4879]: I1125 14:30:02.731255 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-d6t8h" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" probeResult="failure" output=< Nov 25 14:30:02 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:30:02 crc kubenswrapper[4879]: > Nov 25 14:30:03 crc kubenswrapper[4879]: I1125 14:30:03.928751 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:30:03 crc kubenswrapper[4879]: I1125 14:30:03.928840 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:30:03 crc kubenswrapper[4879]: I1125 14:30:03.970880 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.131063 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.131137 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.171251 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.715434 4879 generic.go:334] "Generic (PLEG): container finished" podID="36609869-72a6-4813-bc8b-ea8c53cedf05" 
containerID="bfd36f9216857cd722ada09d2056012fe10804d8a8ffa1764defc018b6a0a8dc" exitCode=0 Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.715535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" event={"ID":"36609869-72a6-4813-bc8b-ea8c53cedf05","Type":"ContainerDied","Data":"bfd36f9216857cd722ada09d2056012fe10804d8a8ffa1764defc018b6a0a8dc"} Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.758378 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cnspb" Nov 25 14:30:04 crc kubenswrapper[4879]: I1125 14:30:04.768935 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 14:30:05 crc kubenswrapper[4879]: I1125 14:30:05.948418 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.128590 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume\") pod \"36609869-72a6-4813-bc8b-ea8c53cedf05\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.128722 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mslhz\" (UniqueName: \"kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz\") pod \"36609869-72a6-4813-bc8b-ea8c53cedf05\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.128752 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume\") pod \"36609869-72a6-4813-bc8b-ea8c53cedf05\" (UID: \"36609869-72a6-4813-bc8b-ea8c53cedf05\") " Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.129288 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume" (OuterVolumeSpecName: "config-volume") pod "36609869-72a6-4813-bc8b-ea8c53cedf05" (UID: "36609869-72a6-4813-bc8b-ea8c53cedf05"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.134106 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz" (OuterVolumeSpecName: "kube-api-access-mslhz") pod "36609869-72a6-4813-bc8b-ea8c53cedf05" (UID: "36609869-72a6-4813-bc8b-ea8c53cedf05"). InnerVolumeSpecName "kube-api-access-mslhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.135076 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "36609869-72a6-4813-bc8b-ea8c53cedf05" (UID: "36609869-72a6-4813-bc8b-ea8c53cedf05"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.230305 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/36609869-72a6-4813-bc8b-ea8c53cedf05-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.230350 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mslhz\" (UniqueName: \"kubernetes.io/projected/36609869-72a6-4813-bc8b-ea8c53cedf05-kube-api-access-mslhz\") on node \"crc\" DevicePath \"\"" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.230362 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/36609869-72a6-4813-bc8b-ea8c53cedf05-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.732341 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" event={"ID":"36609869-72a6-4813-bc8b-ea8c53cedf05","Type":"ContainerDied","Data":"766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5"} Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.732388 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="766c926c4e58f500dba69caf2d16c9006afa044638b354a22e3b5fcf9efc1be5" Nov 25 14:30:06 crc kubenswrapper[4879]: I1125 14:30:06.732454 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6" Nov 25 14:30:11 crc kubenswrapper[4879]: I1125 14:30:11.752285 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:30:11 crc kubenswrapper[4879]: I1125 14:30:11.796160 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:31:17 crc kubenswrapper[4879]: I1125 14:31:17.408710 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:31:17 crc kubenswrapper[4879]: I1125 14:31:17.409376 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.322873 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h8b5b"] Nov 25 14:31:45 crc kubenswrapper[4879]: E1125 14:31:45.323659 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36609869-72a6-4813-bc8b-ea8c53cedf05" containerName="collect-profiles" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.323672 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="36609869-72a6-4813-bc8b-ea8c53cedf05" containerName="collect-profiles" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.323778 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="36609869-72a6-4813-bc8b-ea8c53cedf05" containerName="collect-profiles" 
Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.324281 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.337534 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h8b5b"] Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.480831 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.480894 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.480919 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6lpc\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-kube-api-access-m6lpc\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.480937 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-tls\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.480981 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.481011 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-bound-sa-token\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.481029 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-certificates\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.481049 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-trusted-ca\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.510587 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582405 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582483 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-bound-sa-token\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582513 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-certificates\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582541 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-trusted-ca\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582586 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582610 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6lpc\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-kube-api-access-m6lpc\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.582633 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-tls\") pod 
\"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.584480 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-certificates\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.584833 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.585389 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-trusted-ca\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.589441 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.589774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-registry-tls\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.601392 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-bound-sa-token\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.605220 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6lpc\" (UniqueName: \"kubernetes.io/projected/9ae38c74-ac8d-4dae-9ff5-1eafae69e89e-kube-api-access-m6lpc\") pod \"image-registry-66df7c8f76-h8b5b\" (UID: \"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e\") " pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.642083 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:45 crc kubenswrapper[4879]: I1125 14:31:45.810200 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h8b5b"] Nov 25 14:31:46 crc kubenswrapper[4879]: I1125 14:31:46.428948 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" event={"ID":"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e","Type":"ContainerStarted","Data":"9f751e94a6ed562e4db89bf7a9b9936776403e8defb90b37be08ad7daa5361d1"} Nov 25 14:31:46 crc kubenswrapper[4879]: I1125 14:31:46.429028 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" event={"ID":"9ae38c74-ac8d-4dae-9ff5-1eafae69e89e","Type":"ContainerStarted","Data":"1a9cc05f9db76ea9507e0c748c73227f8375d81c300f06d9c98d0b1480366b54"} Nov 25 14:31:46 crc kubenswrapper[4879]: I1125 14:31:46.429180 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:31:46 crc kubenswrapper[4879]: I1125 14:31:46.461332 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" podStartSLOduration=1.461296187 podStartE2EDuration="1.461296187s" podCreationTimestamp="2025-11-25 14:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:31:46.45732776 +0000 UTC m=+398.060740831" watchObservedRunningTime="2025-11-25 14:31:46.461296187 +0000 UTC m=+398.064709288" Nov 25 14:31:47 crc kubenswrapper[4879]: I1125 14:31:47.409734 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:31:47 crc kubenswrapper[4879]: I1125 14:31:47.410229 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:32:05 crc kubenswrapper[4879]: I1125 14:32:05.655859 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-h8b5b" Nov 25 14:32:05 crc kubenswrapper[4879]: I1125 14:32:05.706462 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"] Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.409481 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.410119 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.410194 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.410942 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.411034 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb" gracePeriod=600 Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.622679 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb" exitCode=0 Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.622734 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb"} Nov 25 14:32:17 crc kubenswrapper[4879]: I1125 14:32:17.623247 4879 scope.go:117] "RemoveContainer" containerID="52ef6dce39e80fbd79c937de9cbb915094d4966fc8cfa44da23fe82e791fd3ba" Nov 25 14:32:18 crc kubenswrapper[4879]: I1125 14:32:18.633523 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb"} Nov 25 14:32:30 crc kubenswrapper[4879]: I1125 14:32:30.749932 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" podUID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" containerName="registry" containerID="cri-o://d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670" gracePeriod=30 Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.151346 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.266930 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.267019 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.267044 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268047 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268096 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268160 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268197 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268215 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268256 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268411 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cr9m\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m\") pod \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\" (UID: \"b3baf06a-b99f-48bb-a21d-28ca86e3604f\") " Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268652 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.268680 4879 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.272739 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m" (OuterVolumeSpecName: "kube-api-access-2cr9m") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "kube-api-access-2cr9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.273098 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.273381 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.273681 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.286633 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.288669 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b3baf06a-b99f-48bb-a21d-28ca86e3604f" (UID: "b3baf06a-b99f-48bb-a21d-28ca86e3604f"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.370357 4879 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.370396 4879 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3baf06a-b99f-48bb-a21d-28ca86e3604f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.370407 4879 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3baf06a-b99f-48bb-a21d-28ca86e3604f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.370415 4879 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.370425 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cr9m\" (UniqueName: \"kubernetes.io/projected/b3baf06a-b99f-48bb-a21d-28ca86e3604f-kube-api-access-2cr9m\") on node \"crc\" DevicePath \"\"" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.705288 4879 generic.go:334] "Generic (PLEG): container finished" podID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" containerID="d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670" exitCode=0 Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.705328 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" event={"ID":"b3baf06a-b99f-48bb-a21d-28ca86e3604f","Type":"ContainerDied","Data":"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670"} Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.705354 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" event={"ID":"b3baf06a-b99f-48bb-a21d-28ca86e3604f","Type":"ContainerDied","Data":"8c10c35ee914a0167aead162485d4962d9f9bf97458d4a2e5f053ace230b565d"} Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.705370 4879 scope.go:117] "RemoveContainer" containerID="d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.705457 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-lh9w9" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.735278 4879 scope.go:117] "RemoveContainer" containerID="d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.740210 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"] Nov 25 14:32:31 crc kubenswrapper[4879]: E1125 14:32:31.744570 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670\": container with ID starting with d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670 not found: ID does not exist" containerID="d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.744632 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670"} err="failed to get container status \"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670\": rpc error: code = NotFound desc = could not find container \"d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670\": container with ID starting with d4506cce7d044258775b37200e4d9d154a7c37d55af1d763ed10322c24fa0670 not found: ID does not exist" Nov 25 14:32:31 crc kubenswrapper[4879]: I1125 14:32:31.747192 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-lh9w9"] Nov 25 14:32:33 crc kubenswrapper[4879]: I1125 14:32:33.652384 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" path="/var/lib/kubelet/pods/b3baf06a-b99f-48bb-a21d-28ca86e3604f/volumes" Nov 25 14:34:17 crc kubenswrapper[4879]: I1125 14:34:17.409023 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:34:17 crc kubenswrapper[4879]: I1125 14:34:17.409780 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:34:47 crc kubenswrapper[4879]: I1125 14:34:47.409387 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:34:47 crc kubenswrapper[4879]: I1125 14:34:47.409949 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.408711 4879 patch_prober.go:28] interesting 
pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.409490 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.409563 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.410319 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.410423 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb" gracePeriod=600 Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.731461 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb" exitCode=0 Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.731512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb"} Nov 25 14:35:17 crc kubenswrapper[4879]: I1125 14:35:17.731554 4879 scope.go:117] "RemoveContainer" containerID="a7d18fab85e4426190566b7565d13657d5f6365c513918e6ecead1fa02eff6cb" Nov 25 14:35:18 crc kubenswrapper[4879]: I1125 14:35:18.738579 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2"} Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.304246 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f9p"] Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306001 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-controller" containerID="cri-o://ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306155 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" 
podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="northd" containerID="cri-o://61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306378 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306433 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-node" containerID="cri-o://8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306464 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-acl-logging" containerID="cri-o://fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.306546 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="sbdb" containerID="cri-o://34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.305995 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="nbdb" containerID="cri-o://181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" gracePeriod=30 Nov 25 14:36:56 crc kubenswrapper[4879]: I1125 14:36:56.376912 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" containerID="cri-o://a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" gracePeriod=30 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.221638 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/3.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.224944 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovn-acl-logging/0.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.226500 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovn-controller/0.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.227101 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.254619 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/2.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.255226 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/1.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.255287 4879 generic.go:334] "Generic (PLEG): container finished" podID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" containerID="04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7" exitCode=2 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.255376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerDied","Data":"04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.255430 4879 scope.go:117] "RemoveContainer" containerID="127ad9cd6c7acd60232e0bb7e9a394725328892b7bb770b60a95a72162605255" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.258944 4879 scope.go:117] "RemoveContainer" containerID="04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.259457 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8m8g8_openshift-multus(f1eafdec-4c5a-4e91-97b4-a117c35838d4)\"" pod="openshift-multus/multus-8m8g8" podUID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.269612 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovnkube-controller/3.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.271683 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovn-acl-logging/0.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272356 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-g7f9p_5301133b-1830-45bc-a55e-7c3e97907bb9/ovn-controller/0.log" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272878 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" exitCode=0 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272905 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" exitCode=0 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272917 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" exitCode=0 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272927 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" exitCode=0 Nov 25 
14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272935 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" exitCode=0 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272943 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" exitCode=0 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272950 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" exitCode=143 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272956 4879 generic.go:334] "Generic (PLEG): container finished" podID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" exitCode=143 Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.272978 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273049 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273091 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273105 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273117 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273146 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273159 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273171 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273179 4879 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273252 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273260 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273267 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273272 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273278 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273284 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273290 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273299 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273309 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273316 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273322 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273328 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273334 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273339 4879 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273345 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273352 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273358 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273364 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273372 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273380 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273389 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273394 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273400 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273406 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273413 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273421 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273427 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273433 4879 pod_container_deletor.go:114] "Failed to issue the request to 
remove container" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273440 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273448 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" event={"ID":"5301133b-1830-45bc-a55e-7c3e97907bb9","Type":"ContainerDied","Data":"7a8a20c2820e9c7250b9ce0fd0df521c9cdd98d5cad5bd9c5d36adbaae7d1076"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273458 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273466 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273473 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273480 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273486 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273492 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273497 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273504 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273510 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273516 4879 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.273609 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g7f9p" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290478 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jb8fn"] Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290722 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290743 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290759 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-node" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290767 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-node" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290779 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290787 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290795 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290805 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290815 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290823 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290836 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="northd" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290844 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="northd" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290858 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" containerName="registry" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290865 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" containerName="registry" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290874 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kubecfg-setup" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290882 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kubecfg-setup" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290895 4879 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290903 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290918 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="nbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290926 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="nbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290938 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="sbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290945 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="sbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.290957 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-acl-logging" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.290965 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-acl-logging" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291082 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-node" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291099 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291111 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291137 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291150 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291159 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291169 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovn-acl-logging" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291180 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="nbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291190 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="sbdb" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291198 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="northd" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291207 4879 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b3baf06a-b99f-48bb-a21d-28ca86e3604f" containerName="registry" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.291316 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291326 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.291338 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291346 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291452 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.291667 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" containerName="ovnkube-controller" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.293468 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.303796 4879 scope.go:117] "RemoveContainer" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308273 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308306 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7zrd\" (UniqueName: \"kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308324 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308339 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308358 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308374 4879 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308417 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308451 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308467 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308490 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308513 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308532 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308560 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308574 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") 
" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308590 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308606 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308621 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308636 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308652 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log\") pod \"5301133b-1830-45bc-a55e-7c3e97907bb9\" (UID: \"5301133b-1830-45bc-a55e-7c3e97907bb9\") " Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308771 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-var-lib-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308803 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-bin\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308838 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-env-overrides\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308865 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-kubelet\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308881 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-log-socket\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308907 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-ovn\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308926 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-netd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308940 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-config\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308955 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovn-node-metrics-cert\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308969 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-systemd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308989 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-node-log\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309042 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-slash\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309063 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdmfs\" (UniqueName: \"kubernetes.io/projected/d7999ab0-9257-4a07-afe7-6518ff44f0ef-kube-api-access-sdmfs\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309078 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309092 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-systemd-units\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309108 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309134 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-script-lib\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309150 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-etc-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309168 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309188 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-netns\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.308366 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309678 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). 
InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.309695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310027 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310043 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310086 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310105 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310136 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310182 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310474 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash" (OuterVolumeSpecName: "host-slash") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310495 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310533 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310561 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log" (OuterVolumeSpecName: "node-log") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310562 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket" (OuterVolumeSpecName: "log-socket") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310586 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310566 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.310655 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.318129 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd" (OuterVolumeSpecName: "kube-api-access-k7zrd") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "kube-api-access-k7zrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.318203 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.323746 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.324377 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "5301133b-1830-45bc-a55e-7c3e97907bb9" (UID: "5301133b-1830-45bc-a55e-7c3e97907bb9"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.347027 4879 scope.go:117] "RemoveContainer" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.358487 4879 scope.go:117] "RemoveContainer" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.371475 4879 scope.go:117] "RemoveContainer" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.386233 4879 scope.go:117] "RemoveContainer" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.396633 4879 scope.go:117] "RemoveContainer" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409508 4879 scope.go:117] "RemoveContainer" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409773 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdmfs\" (UniqueName: \"kubernetes.io/projected/d7999ab0-9257-4a07-afe7-6518ff44f0ef-kube-api-access-sdmfs\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409811 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409837 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-systemd-units\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409861 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409883 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-script-lib\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409908 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-etc-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409926 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409920 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-systemd-units\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409929 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.409973 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410010 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410147 
4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-netns\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410117 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-etc-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410193 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-var-lib-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410255 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-run-netns\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410269 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-bin\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410297 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-bin\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410227 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-var-lib-openvswitch\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410319 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-env-overrides\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410361 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-kubelet\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410392 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-log-socket\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410423 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-ovn\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410461 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-netd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410491 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-config\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410518 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-kubelet\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovn-node-metrics-cert\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410569 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-ovn\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410614 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-log-socket\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410656 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-systemd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410739 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-node-log\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410754 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-env-overrides\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-run-systemd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410744 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-script-lib\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410662 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-cni-netd\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410815 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-slash\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410806 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-node-log\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410860 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d7999ab0-9257-4a07-afe7-6518ff44f0ef-host-slash\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410944 4879 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410958 4879 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410971 4879 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc 
kubenswrapper[4879]: I1125 14:36:57.410983 4879 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.410993 4879 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411004 4879 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411017 4879 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-slash\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411028 4879 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411037 4879 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-log-socket\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411049 4879 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-node-log\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411058 4879 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411067 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7zrd\" (UniqueName: \"kubernetes.io/projected/5301133b-1830-45bc-a55e-7c3e97907bb9-kube-api-access-k7zrd\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411077 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5301133b-1830-45bc-a55e-7c3e97907bb9-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411085 4879 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411095 4879 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411102 4879 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411110 4879 reconciler_common.go:293] "Volume detached for volume 
\"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411118 4879 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411142 4879 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/5301133b-1830-45bc-a55e-7c3e97907bb9-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411151 4879 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/5301133b-1830-45bc-a55e-7c3e97907bb9-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.411631 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovnkube-config\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.415274 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d7999ab0-9257-4a07-afe7-6518ff44f0ef-ovn-node-metrics-cert\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.431768 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdmfs\" (UniqueName: \"kubernetes.io/projected/d7999ab0-9257-4a07-afe7-6518ff44f0ef-kube-api-access-sdmfs\") pod \"ovnkube-node-jb8fn\" (UID: \"d7999ab0-9257-4a07-afe7-6518ff44f0ef\") " pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.460115 4879 scope.go:117] "RemoveContainer" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.474521 4879 scope.go:117] "RemoveContainer" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.488464 4879 scope.go:117] "RemoveContainer" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.488939 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": container with ID starting with a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2 not found: ID does not exist" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.488979 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} err="failed to get container status \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": rpc error: code = NotFound desc = could not find container 
\"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": container with ID starting with a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.489006 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.489421 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": container with ID starting with eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99 not found: ID does not exist" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.489454 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} err="failed to get container status \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": rpc error: code = NotFound desc = could not find container \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": container with ID starting with eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.489478 4879 scope.go:117] "RemoveContainer" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.489690 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": container with ID starting with 34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7 not found: ID does not exist" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.489719 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} err="failed to get container status \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": rpc error: code = NotFound desc = could not find container \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": container with ID starting with 34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.489732 4879 scope.go:117] "RemoveContainer" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.490029 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": container with ID starting with 181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef not found: ID does not exist" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.490074 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} 
err="failed to get container status \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": rpc error: code = NotFound desc = could not find container \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": container with ID starting with 181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.490105 4879 scope.go:117] "RemoveContainer" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.490739 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": container with ID starting with 61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34 not found: ID does not exist" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.490766 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} err="failed to get container status \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": rpc error: code = NotFound desc = could not find container \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": container with ID starting with 61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.490782 4879 scope.go:117] "RemoveContainer" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.491212 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": container with ID starting with 6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293 not found: ID does not exist" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491239 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} err="failed to get container status \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": rpc error: code = NotFound desc = could not find container \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": container with ID starting with 6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491260 4879 scope.go:117] "RemoveContainer" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.491495 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": container with ID starting with 8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9 not found: ID does not exist" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491522 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} err="failed to get container status \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": rpc error: code = NotFound desc = could not find container \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": container with ID starting with 8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491549 4879 scope.go:117] "RemoveContainer" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.491794 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": container with ID starting with fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30 not found: ID does not exist" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491831 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} err="failed to get container status \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": rpc error: code = NotFound desc = could not find container \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": container with ID starting with fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.491857 4879 scope.go:117] "RemoveContainer" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.492136 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": container with ID starting with ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816 not found: ID does not exist" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492166 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} err="failed to get container status \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": rpc error: code = NotFound desc = could not find container \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": container with ID starting with ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492186 4879 scope.go:117] "RemoveContainer" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: E1125 14:36:57.492411 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": container with ID starting with bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f not found: ID does 
not exist" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492435 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} err="failed to get container status \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": rpc error: code = NotFound desc = could not find container \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": container with ID starting with bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492457 4879 scope.go:117] "RemoveContainer" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492680 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} err="failed to get container status \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": rpc error: code = NotFound desc = could not find container \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": container with ID starting with a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492705 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492884 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} err="failed to get container status \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": rpc error: code = NotFound desc = could not find container \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": container with ID starting with eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.492907 4879 scope.go:117] "RemoveContainer" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493145 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} err="failed to get container status \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": rpc error: code = NotFound desc = could not find container \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": container with ID starting with 34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493169 4879 scope.go:117] "RemoveContainer" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493391 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} err="failed to get container status \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": rpc error: code = NotFound desc = could 
not find container \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": container with ID starting with 181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493416 4879 scope.go:117] "RemoveContainer" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493607 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} err="failed to get container status \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": rpc error: code = NotFound desc = could not find container \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": container with ID starting with 61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493631 4879 scope.go:117] "RemoveContainer" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.493989 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} err="failed to get container status \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": rpc error: code = NotFound desc = could not find container \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": container with ID starting with 6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494009 4879 scope.go:117] "RemoveContainer" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494262 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} err="failed to get container status \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": rpc error: code = NotFound desc = could not find container \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": container with ID starting with 8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494282 4879 scope.go:117] "RemoveContainer" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494705 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} err="failed to get container status \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": rpc error: code = NotFound desc = could not find container \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": container with ID starting with fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494736 4879 scope.go:117] "RemoveContainer" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494973 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} err="failed to get container status \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": rpc error: code = NotFound desc = could not find container \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": container with ID starting with ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.494989 4879 scope.go:117] "RemoveContainer" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.495429 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} err="failed to get container status \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": rpc error: code = NotFound desc = could not find container \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": container with ID starting with bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.495453 4879 scope.go:117] "RemoveContainer" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.495658 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} err="failed to get container status \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": rpc error: code = NotFound desc = could not find container \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": container with ID starting with a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.495730 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496022 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} err="failed to get container status \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": rpc error: code = NotFound desc = could not find container \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": container with ID starting with eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496045 4879 scope.go:117] "RemoveContainer" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496264 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} err="failed to get container status \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": rpc error: code = NotFound desc = could not find container \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": container with ID starting with 
34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496285 4879 scope.go:117] "RemoveContainer" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496492 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} err="failed to get container status \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": rpc error: code = NotFound desc = could not find container \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": container with ID starting with 181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496512 4879 scope.go:117] "RemoveContainer" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496730 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} err="failed to get container status \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": rpc error: code = NotFound desc = could not find container \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": container with ID starting with 61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496757 4879 scope.go:117] "RemoveContainer" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.496984 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} err="failed to get container status \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": rpc error: code = NotFound desc = could not find container \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": container with ID starting with 6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497003 4879 scope.go:117] "RemoveContainer" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497245 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} err="failed to get container status \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": rpc error: code = NotFound desc = could not find container \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": container with ID starting with 8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497266 4879 scope.go:117] "RemoveContainer" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497460 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} err="failed to get container status \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": rpc error: code = NotFound desc = could not find container \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": container with ID starting with fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497476 4879 scope.go:117] "RemoveContainer" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497666 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} err="failed to get container status \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": rpc error: code = NotFound desc = could not find container \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": container with ID starting with ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497690 4879 scope.go:117] "RemoveContainer" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497895 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} err="failed to get container status \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": rpc error: code = NotFound desc = could not find container \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": container with ID starting with bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.497917 4879 scope.go:117] "RemoveContainer" containerID="a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498224 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2"} err="failed to get container status \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": rpc error: code = NotFound desc = could not find container \"a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2\": container with ID starting with a2b2f118a11fd0c926895e709afe0e7ed92fdd5703c67988ad81920b125956f2 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498242 4879 scope.go:117] "RemoveContainer" containerID="eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498444 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99"} err="failed to get container status \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": rpc error: code = NotFound desc = could not find container \"eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99\": container with ID starting with eaeb029cebc6a7eea27e6535651300a2798b50caa30f0e7ef1656dbf5e72ae99 not found: ID does not exist" Nov 
25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498467 4879 scope.go:117] "RemoveContainer" containerID="34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498701 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7"} err="failed to get container status \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": rpc error: code = NotFound desc = could not find container \"34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7\": container with ID starting with 34fee4d56eea47f8737b85ba2b385efaf02fef8a3b37f270e9ce2031252999d7 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498724 4879 scope.go:117] "RemoveContainer" containerID="181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498950 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef"} err="failed to get container status \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": rpc error: code = NotFound desc = could not find container \"181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef\": container with ID starting with 181a73f3f85b3c61037792c477927fcb1bccd6875e84dda2019f9b5032bf31ef not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.498973 4879 scope.go:117] "RemoveContainer" containerID="61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499191 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34"} err="failed to get container status \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": rpc error: code = NotFound desc = could not find container \"61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34\": container with ID starting with 61da62b82ee076b136cfa156c98499250755a0b29b5290b79886c8a705649d34 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499212 4879 scope.go:117] "RemoveContainer" containerID="6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499433 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293"} err="failed to get container status \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": rpc error: code = NotFound desc = could not find container \"6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293\": container with ID starting with 6de2ccf17cdd248e6a689ef2b1c55469e70de553127af32554136d3561c33293 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499451 4879 scope.go:117] "RemoveContainer" containerID="8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499649 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9"} err="failed to get container status 
\"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": rpc error: code = NotFound desc = could not find container \"8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9\": container with ID starting with 8feb3e6301d587207eb5a243e8d24a2736b761c4bf79a9c28107309d1f6818b9 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499668 4879 scope.go:117] "RemoveContainer" containerID="fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499888 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30"} err="failed to get container status \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": rpc error: code = NotFound desc = could not find container \"fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30\": container with ID starting with fbcc6d70df603d6b1d4e5280f05d2776f4a3b8f723cf152ebab722fa35393e30 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.499908 4879 scope.go:117] "RemoveContainer" containerID="ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.500166 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816"} err="failed to get container status \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": rpc error: code = NotFound desc = could not find container \"ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816\": container with ID starting with ecf3f2f243bd6a219b1f35f38db32db23bbb863af752c121e9d34cb862873816 not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.500187 4879 scope.go:117] "RemoveContainer" containerID="bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.500450 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f"} err="failed to get container status \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": rpc error: code = NotFound desc = could not find container \"bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f\": container with ID starting with bb9ca9475f57211654e6e7f4a3f74285f87f4527c0e6182dabfef48abf62fe9f not found: ID does not exist" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.608157 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f9p"] Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.608350 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.610786 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g7f9p"] Nov 25 14:36:57 crc kubenswrapper[4879]: I1125 14:36:57.656082 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5301133b-1830-45bc-a55e-7c3e97907bb9" path="/var/lib/kubelet/pods/5301133b-1830-45bc-a55e-7c3e97907bb9/volumes" Nov 25 14:36:58 crc kubenswrapper[4879]: I1125 14:36:58.280205 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/2.log" Nov 25 14:36:58 crc kubenswrapper[4879]: I1125 14:36:58.283074 4879 generic.go:334] "Generic (PLEG): container finished" podID="d7999ab0-9257-4a07-afe7-6518ff44f0ef" containerID="eb65e7b8191210d41e347d5061282926a09c8aab3fbd7fd96fc39954dee0a9bb" exitCode=0 Nov 25 14:36:58 crc kubenswrapper[4879]: I1125 14:36:58.283105 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerDied","Data":"eb65e7b8191210d41e347d5061282926a09c8aab3fbd7fd96fc39954dee0a9bb"} Nov 25 14:36:58 crc kubenswrapper[4879]: I1125 14:36:58.283274 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"49472ed0de86b70b059aa25085bd54419787d6e0765ec56ed97e053f3cbd1260"} Nov 25 14:36:59 crc kubenswrapper[4879]: I1125 14:36:59.291642 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"6bdf74f79b29f5afa06611997cb41c1858fffd4290003f54743c40526d3bf266"} Nov 25 14:36:59 crc kubenswrapper[4879]: I1125 14:36:59.292178 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"3f71f8945d74607cf78d9e01e535797fd02441e5cd462e600f588bafb7aef83d"} Nov 25 14:36:59 crc kubenswrapper[4879]: I1125 14:36:59.292195 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"575f0178c5651f2301096831b6f987e6a4738ef880538254d82efab493f09274"} Nov 25 14:36:59 crc kubenswrapper[4879]: I1125 14:36:59.292208 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"0a05f5672e534ece9a257d4c08d4965b64490695c72a182464e41cc296fa334d"} Nov 25 14:37:00 crc kubenswrapper[4879]: I1125 14:37:00.304983 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"4c6f6c5bc7bcda4c79831baab9fd4b902be3e517a0826a08e7b570789a6b706d"} Nov 25 14:37:00 crc kubenswrapper[4879]: I1125 14:37:00.305514 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"84bfea25d94a8d727a600207af7a14ee6ee21184fabfba5b8c84fbc1a90d75c8"} Nov 25 14:37:02 crc 
kubenswrapper[4879]: I1125 14:37:02.319940 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"40a05887e797744e0bc6dda4427d2edc0ab78cb41257328b2cb2940af9f1e327"} Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.179656 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-kvkpx"] Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.181103 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.184018 4879 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-47r78" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.184094 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.185831 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.185914 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.201388 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.201443 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzxlz\" (UniqueName: \"kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.201705 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.303111 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.303246 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.303276 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzxlz\" (UniqueName: \"kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz\") pod \"crc-storage-crc-kvkpx\" (UID: 
\"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.303561 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.305338 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.326322 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzxlz\" (UniqueName: \"kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz\") pod \"crc-storage-crc-kvkpx\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.333278 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" event={"ID":"d7999ab0-9257-4a07-afe7-6518ff44f0ef","Type":"ContainerStarted","Data":"f84b1ea46897a95f52cef2113dd8f9a151e6adcea5c1fa80805285d45fe314b6"} Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.333717 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.371318 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" podStartSLOduration=7.371301668 podStartE2EDuration="7.371301668s" podCreationTimestamp="2025-11-25 14:36:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:37:04.36767588 +0000 UTC m=+715.971088971" watchObservedRunningTime="2025-11-25 14:37:04.371301668 +0000 UTC m=+715.974714739" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.380869 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.506113 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: E1125 14:37:04.524704 4879 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(96c4d6d98a2f67f511a60664572ad21abee610ec6b0340d53db387ed847c2edd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 14:37:04 crc kubenswrapper[4879]: E1125 14:37:04.524781 4879 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(96c4d6d98a2f67f511a60664572ad21abee610ec6b0340d53db387ed847c2edd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: E1125 14:37:04.524803 4879 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(96c4d6d98a2f67f511a60664572ad21abee610ec6b0340d53db387ed847c2edd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:04 crc kubenswrapper[4879]: E1125 14:37:04.524868 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(96c4d6d98a2f67f511a60664572ad21abee610ec6b0340d53db387ed847c2edd): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kvkpx" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" Nov 25 14:37:04 crc kubenswrapper[4879]: I1125 14:37:04.853788 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kvkpx"] Nov 25 14:37:05 crc kubenswrapper[4879]: I1125 14:37:05.339320 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:05 crc kubenswrapper[4879]: I1125 14:37:05.339729 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:05 crc kubenswrapper[4879]: I1125 14:37:05.340542 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:05 crc kubenswrapper[4879]: I1125 14:37:05.340583 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:05 crc kubenswrapper[4879]: I1125 14:37:05.374198 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:05 crc kubenswrapper[4879]: E1125 14:37:05.376787 4879 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(6da3c81a72f89c5ec29a92ffab6095961d4b18eeb40c190b2dc2acf801895792): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 14:37:05 crc kubenswrapper[4879]: E1125 14:37:05.376858 4879 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(6da3c81a72f89c5ec29a92ffab6095961d4b18eeb40c190b2dc2acf801895792): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:05 crc kubenswrapper[4879]: E1125 14:37:05.376895 4879 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(6da3c81a72f89c5ec29a92ffab6095961d4b18eeb40c190b2dc2acf801895792): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:05 crc kubenswrapper[4879]: E1125 14:37:05.377420 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(6da3c81a72f89c5ec29a92ffab6095961d4b18eeb40c190b2dc2acf801895792): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kvkpx" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" Nov 25 14:37:10 crc kubenswrapper[4879]: I1125 14:37:10.644875 4879 scope.go:117] "RemoveContainer" containerID="04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7" Nov 25 14:37:10 crc kubenswrapper[4879]: E1125 14:37:10.645862 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8m8g8_openshift-multus(f1eafdec-4c5a-4e91-97b4-a117c35838d4)\"" pod="openshift-multus/multus-8m8g8" podUID="f1eafdec-4c5a-4e91-97b4-a117c35838d4" Nov 25 14:37:19 crc kubenswrapper[4879]: I1125 14:37:19.644646 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:19 crc kubenswrapper[4879]: I1125 14:37:19.650172 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:19 crc kubenswrapper[4879]: E1125 14:37:19.678872 4879 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(f5968c2c32f72d6237799a0e774a1f5c90b420c71b7f19589129c68e247c376a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 14:37:19 crc kubenswrapper[4879]: E1125 14:37:19.678961 4879 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(f5968c2c32f72d6237799a0e774a1f5c90b420c71b7f19589129c68e247c376a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:19 crc kubenswrapper[4879]: E1125 14:37:19.678987 4879 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(f5968c2c32f72d6237799a0e774a1f5c90b420c71b7f19589129c68e247c376a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:19 crc kubenswrapper[4879]: E1125 14:37:19.679033 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kvkpx_crc-storage(73a2604b-9d34-4dee-ab7a-1741dcda9c58)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kvkpx_crc-storage_73a2604b-9d34-4dee-ab7a-1741dcda9c58_0(f5968c2c32f72d6237799a0e774a1f5c90b420c71b7f19589129c68e247c376a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kvkpx" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" Nov 25 14:37:25 crc kubenswrapper[4879]: I1125 14:37:25.644854 4879 scope.go:117] "RemoveContainer" containerID="04343c2e95b524805d649e04e2af6186367823857f00f9c90346cea5ad99b1a7" Nov 25 14:37:26 crc kubenswrapper[4879]: I1125 14:37:26.516097 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8m8g8_f1eafdec-4c5a-4e91-97b4-a117c35838d4/kube-multus/2.log" Nov 25 14:37:26 crc kubenswrapper[4879]: I1125 14:37:26.516929 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8m8g8" event={"ID":"f1eafdec-4c5a-4e91-97b4-a117c35838d4","Type":"ContainerStarted","Data":"4d9130cb330173d541ebc23a5d68c3bac90bb2ac15b6ed2906719863b393458e"} Nov 25 14:37:27 crc kubenswrapper[4879]: I1125 14:37:27.634596 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jb8fn" Nov 25 14:37:33 crc kubenswrapper[4879]: I1125 14:37:33.643945 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:33 crc kubenswrapper[4879]: I1125 14:37:33.644747 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:34 crc kubenswrapper[4879]: I1125 14:37:34.058045 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kvkpx"] Nov 25 14:37:34 crc kubenswrapper[4879]: I1125 14:37:34.064199 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:37:34 crc kubenswrapper[4879]: I1125 14:37:34.569239 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kvkpx" event={"ID":"73a2604b-9d34-4dee-ab7a-1741dcda9c58","Type":"ContainerStarted","Data":"af165bfc1847c859e2dacff5a9cf5ba7591b674b24d8d26e59000ec2dcf8e316"} Nov 25 14:37:36 crc kubenswrapper[4879]: I1125 14:37:36.581184 4879 generic.go:334] "Generic (PLEG): container finished" podID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" containerID="300d5f6e2ac3e797b8e4321551487c763a497386a386fa1f6fac1fbf246a1394" exitCode=0 Nov 25 14:37:36 crc kubenswrapper[4879]: I1125 14:37:36.581237 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kvkpx" event={"ID":"73a2604b-9d34-4dee-ab7a-1741dcda9c58","Type":"ContainerDied","Data":"300d5f6e2ac3e797b8e4321551487c763a497386a386fa1f6fac1fbf246a1394"} Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.785491 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.934315 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzxlz\" (UniqueName: \"kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz\") pod \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.934362 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt\") pod \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.934479 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage\") pod \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\" (UID: \"73a2604b-9d34-4dee-ab7a-1741dcda9c58\") " Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.935019 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "73a2604b-9d34-4dee-ab7a-1741dcda9c58" (UID: "73a2604b-9d34-4dee-ab7a-1741dcda9c58"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.939351 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz" (OuterVolumeSpecName: "kube-api-access-zzxlz") pod "73a2604b-9d34-4dee-ab7a-1741dcda9c58" (UID: "73a2604b-9d34-4dee-ab7a-1741dcda9c58"). InnerVolumeSpecName "kube-api-access-zzxlz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:37:37 crc kubenswrapper[4879]: I1125 14:37:37.951697 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "73a2604b-9d34-4dee-ab7a-1741dcda9c58" (UID: "73a2604b-9d34-4dee-ab7a-1741dcda9c58"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.036314 4879 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/73a2604b-9d34-4dee-ab7a-1741dcda9c58-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.036351 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzxlz\" (UniqueName: \"kubernetes.io/projected/73a2604b-9d34-4dee-ab7a-1741dcda9c58-kube-api-access-zzxlz\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.036360 4879 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/73a2604b-9d34-4dee-ab7a-1741dcda9c58-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.597468 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kvkpx" event={"ID":"73a2604b-9d34-4dee-ab7a-1741dcda9c58","Type":"ContainerDied","Data":"af165bfc1847c859e2dacff5a9cf5ba7591b674b24d8d26e59000ec2dcf8e316"} Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.597510 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af165bfc1847c859e2dacff5a9cf5ba7591b674b24d8d26e59000ec2dcf8e316" Nov 25 14:37:38 crc kubenswrapper[4879]: I1125 14:37:38.597542 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kvkpx" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.046228 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.046769 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" containerID="cri-o://60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30" gracePeriod=30 Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.155959 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.156201 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerName="route-controller-manager" containerID="cri-o://08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96" gracePeriod=30 Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.439235 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.547644 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.559167 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config\") pod \"604232d4-a119-4628-99a6-0c23df51f851\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.559234 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles\") pod \"604232d4-a119-4628-99a6-0c23df51f851\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.559263 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca\") pod \"604232d4-a119-4628-99a6-0c23df51f851\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562584 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert\") pod \"604232d4-a119-4628-99a6-0c23df51f851\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562626 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxzbc\" (UniqueName: \"kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc\") pod \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562651 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert\") pod \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562703 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phqxx\" (UniqueName: \"kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx\") pod \"604232d4-a119-4628-99a6-0c23df51f851\" (UID: \"604232d4-a119-4628-99a6-0c23df51f851\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562738 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca\") pod \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562769 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config\") pod \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\" (UID: \"8fce6f2a-2662-4b9c-a631-9e590fc697cb\") " Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.560158 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config" (OuterVolumeSpecName: "config") pod "604232d4-a119-4628-99a6-0c23df51f851" (UID: 
"604232d4-a119-4628-99a6-0c23df51f851"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.560363 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "604232d4-a119-4628-99a6-0c23df51f851" (UID: "604232d4-a119-4628-99a6-0c23df51f851"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.562475 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca" (OuterVolumeSpecName: "client-ca") pod "604232d4-a119-4628-99a6-0c23df51f851" (UID: "604232d4-a119-4628-99a6-0c23df51f851"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.563317 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.563331 4879 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.563340 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/604232d4-a119-4628-99a6-0c23df51f851-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.564342 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca" (OuterVolumeSpecName: "client-ca") pod "8fce6f2a-2662-4b9c-a631-9e590fc697cb" (UID: "8fce6f2a-2662-4b9c-a631-9e590fc697cb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.564471 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config" (OuterVolumeSpecName: "config") pod "8fce6f2a-2662-4b9c-a631-9e590fc697cb" (UID: "8fce6f2a-2662-4b9c-a631-9e590fc697cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.568727 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8fce6f2a-2662-4b9c-a631-9e590fc697cb" (UID: "8fce6f2a-2662-4b9c-a631-9e590fc697cb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.569044 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "604232d4-a119-4628-99a6-0c23df51f851" (UID: "604232d4-a119-4628-99a6-0c23df51f851"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.569147 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx" (OuterVolumeSpecName: "kube-api-access-phqxx") pod "604232d4-a119-4628-99a6-0c23df51f851" (UID: "604232d4-a119-4628-99a6-0c23df51f851"). InnerVolumeSpecName "kube-api-access-phqxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.570579 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc" (OuterVolumeSpecName: "kube-api-access-fxzbc") pod "8fce6f2a-2662-4b9c-a631-9e590fc697cb" (UID: "8fce6f2a-2662-4b9c-a631-9e590fc697cb"). InnerVolumeSpecName "kube-api-access-fxzbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.602768 4879 generic.go:334] "Generic (PLEG): container finished" podID="604232d4-a119-4628-99a6-0c23df51f851" containerID="60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30" exitCode=0 Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.602811 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" event={"ID":"604232d4-a119-4628-99a6-0c23df51f851","Type":"ContainerDied","Data":"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30"} Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.602825 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.602846 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gvljt" event={"ID":"604232d4-a119-4628-99a6-0c23df51f851","Type":"ContainerDied","Data":"a3767b0ce7c649a7a6f15267b6c79ffd9bd0149238f6029e82dffb85e621161b"} Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.602862 4879 scope.go:117] "RemoveContainer" containerID="60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.604825 4879 generic.go:334] "Generic (PLEG): container finished" podID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerID="08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96" exitCode=0 Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.604867 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.604881 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" event={"ID":"8fce6f2a-2662-4b9c-a631-9e590fc697cb","Type":"ContainerDied","Data":"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96"} Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.604934 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d" event={"ID":"8fce6f2a-2662-4b9c-a631-9e590fc697cb","Type":"ContainerDied","Data":"6ba0defdec3820e199cb74523e7c25a5da9a8e2e927aa758be1f343abc29c268"} Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.627944 4879 scope.go:117] "RemoveContainer" containerID="60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30" Nov 25 14:37:39 crc kubenswrapper[4879]: E1125 14:37:39.628527 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30\": container with ID starting with 60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30 not found: ID does not exist" containerID="60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.628587 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30"} err="failed to get container status \"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30\": rpc error: code = NotFound desc = could not find container \"60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30\": container with ID starting with 60a9afcbdea8364c2ae27a57c3dded8badbacb8686feb9cf94eedd2901b74e30 not found: ID does not exist" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.628617 4879 scope.go:117] "RemoveContainer" containerID="08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.641794 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.653478 4879 scope.go:117] "RemoveContainer" containerID="08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96" Nov 25 14:37:39 crc kubenswrapper[4879]: E1125 14:37:39.654215 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96\": container with ID starting with 08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96 not found: ID does not exist" containerID="08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.654308 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96"} err="failed to get container status \"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96\": rpc error: code = NotFound desc = could not find container \"08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96\": container with ID 
starting with 08863d83cead3053e1168ec380358abf35e3a9d761c78fd75479ca17c9513c96 not found: ID does not exist" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.656267 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2498d"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.656301 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.656342 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gvljt"] Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664285 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/604232d4-a119-4628-99a6-0c23df51f851-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664313 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxzbc\" (UniqueName: \"kubernetes.io/projected/8fce6f2a-2662-4b9c-a631-9e590fc697cb-kube-api-access-fxzbc\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664322 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fce6f2a-2662-4b9c-a631-9e590fc697cb-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664352 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phqxx\" (UniqueName: \"kubernetes.io/projected/604232d4-a119-4628-99a6-0c23df51f851-kube-api-access-phqxx\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664361 4879 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:39 crc kubenswrapper[4879]: I1125 14:37:39.664370 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fce6f2a-2662-4b9c-a631-9e590fc697cb-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421398 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:40 crc kubenswrapper[4879]: E1125 14:37:40.421645 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerName="route-controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421714 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerName="route-controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: E1125 14:37:40.421727 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421735 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: E1125 14:37:40.421759 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" containerName="storage" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421767 4879 
state_mem.go:107] "Deleted CPUSet assignment" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" containerName="storage" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421870 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="604232d4-a119-4628-99a6-0c23df51f851" containerName="controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421886 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" containerName="storage" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.421895 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" containerName="route-controller-manager" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.422750 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.424167 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.424943 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.425156 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.426649 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.426651 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.427190 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.428007 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6"] Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.430223 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.432056 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.432438 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.432619 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.432778 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.433059 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.433459 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.434260 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.434550 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6"] Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.435054 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.573868 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-config\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.573970 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574003 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574607 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-serving-cert\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " 
pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574752 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-client-ca\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574789 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mt2p\" (UniqueName: \"kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574812 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhvh2\" (UniqueName: \"kubernetes.io/projected/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-kube-api-access-rhvh2\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.574972 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676803 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-config\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676883 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " 
pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676933 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-serving-cert\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676963 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-client-ca\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.676986 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mt2p\" (UniqueName: \"kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.677009 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.677039 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhvh2\" (UniqueName: \"kubernetes.io/projected/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-kube-api-access-rhvh2\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.677065 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.678359 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.678794 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.678833 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-client-ca\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.679350 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.679416 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-config\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.681512 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.683524 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-serving-cert\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.694851 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mt2p\" (UniqueName: \"kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p\") pod \"controller-manager-dc75f4fc-64h87\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.699031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhvh2\" (UniqueName: \"kubernetes.io/projected/d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9-kube-api-access-rhvh2\") pod \"route-controller-manager-7587cfcdd5-7ltk6\" (UID: \"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9\") " pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.743149 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.756457 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.831515 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.957134 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:40 crc kubenswrapper[4879]: W1125 14:37:40.961461 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c78ca25_409d_4aba_8db7_6401bc56b874.slice/crio-db54e06440865eb17b8b74b84fd23d8b3bc8734d2f335cfbd940efad8bdd4199 WatchSource:0}: Error finding container db54e06440865eb17b8b74b84fd23d8b3bc8734d2f335cfbd940efad8bdd4199: Status 404 returned error can't find the container with id db54e06440865eb17b8b74b84fd23d8b3bc8734d2f335cfbd940efad8bdd4199 Nov 25 14:37:40 crc kubenswrapper[4879]: I1125 14:37:40.989954 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6"] Nov 25 14:37:40 crc kubenswrapper[4879]: W1125 14:37:40.995296 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1feef39_9b3d_4ad5_88a9_9c99ef93f7d9.slice/crio-75b0cddb27472ef1a6990cb313c88b1e33f181d85c9572461c569220eb7d27b1 WatchSource:0}: Error finding container 75b0cddb27472ef1a6990cb313c88b1e33f181d85c9572461c569220eb7d27b1: Status 404 returned error can't find the container with id 75b0cddb27472ef1a6990cb313c88b1e33f181d85c9572461c569220eb7d27b1 Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.632081 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" podUID="0c78ca25-409d-4aba-8db7-6401bc56b874" containerName="controller-manager" containerID="cri-o://dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd" gracePeriod=30 Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.632229 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" event={"ID":"0c78ca25-409d-4aba-8db7-6401bc56b874","Type":"ContainerStarted","Data":"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd"} Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.632500 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.632518 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" event={"ID":"0c78ca25-409d-4aba-8db7-6401bc56b874","Type":"ContainerStarted","Data":"db54e06440865eb17b8b74b84fd23d8b3bc8734d2f335cfbd940efad8bdd4199"} Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.633876 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" event={"ID":"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9","Type":"ContainerStarted","Data":"304adbd4f27f03930f4f9b47a1eb381a1007e9b3f4ff7630608a6a75ad76c356"} Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.633908 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" event={"ID":"d1feef39-9b3d-4ad5-88a9-9c99ef93f7d9","Type":"ContainerStarted","Data":"75b0cddb27472ef1a6990cb313c88b1e33f181d85c9572461c569220eb7d27b1"} Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.634262 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.640498 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.647203 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" podStartSLOduration=2.647189594 podStartE2EDuration="2.647189594s" podCreationTimestamp="2025-11-25 14:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:37:41.645949801 +0000 UTC m=+753.249362872" watchObservedRunningTime="2025-11-25 14:37:41.647189594 +0000 UTC m=+753.250602665" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.651748 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="604232d4-a119-4628-99a6-0c23df51f851" path="/var/lib/kubelet/pods/604232d4-a119-4628-99a6-0c23df51f851/volumes" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.652558 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fce6f2a-2662-4b9c-a631-9e590fc697cb" path="/var/lib/kubelet/pods/8fce6f2a-2662-4b9c-a631-9e590fc697cb/volumes" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.656424 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.663048 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7587cfcdd5-7ltk6" podStartSLOduration=2.663031674 podStartE2EDuration="2.663031674s" podCreationTimestamp="2025-11-25 14:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:37:41.661592705 +0000 UTC m=+753.265005776" watchObservedRunningTime="2025-11-25 14:37:41.663031674 +0000 UTC m=+753.266444735" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.952592 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.980042 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58c566f7fb-779bj"] Nov 25 14:37:41 crc kubenswrapper[4879]: E1125 14:37:41.980306 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c78ca25-409d-4aba-8db7-6401bc56b874" containerName="controller-manager" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.980323 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c78ca25-409d-4aba-8db7-6401bc56b874" containerName="controller-manager" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.980441 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c78ca25-409d-4aba-8db7-6401bc56b874" containerName="controller-manager" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.981410 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:41 crc kubenswrapper[4879]: I1125 14:37:41.987685 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58c566f7fb-779bj"] Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.093967 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mt2p\" (UniqueName: \"kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p\") pod \"0c78ca25-409d-4aba-8db7-6401bc56b874\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094399 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert\") pod \"0c78ca25-409d-4aba-8db7-6401bc56b874\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094477 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca\") pod \"0c78ca25-409d-4aba-8db7-6401bc56b874\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094502 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles\") pod \"0c78ca25-409d-4aba-8db7-6401bc56b874\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094545 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config\") pod \"0c78ca25-409d-4aba-8db7-6401bc56b874\" (UID: \"0c78ca25-409d-4aba-8db7-6401bc56b874\") " Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094745 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8604c68-c614-41af-83af-c59ed68d5937-serving-cert\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094799 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp6n8\" (UniqueName: \"kubernetes.io/projected/e8604c68-c614-41af-83af-c59ed68d5937-kube-api-access-kp6n8\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094848 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-client-ca\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094897 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-proxy-ca-bundles\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.094974 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-config\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.095294 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca" (OuterVolumeSpecName: "client-ca") pod "0c78ca25-409d-4aba-8db7-6401bc56b874" (UID: "0c78ca25-409d-4aba-8db7-6401bc56b874"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.095346 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0c78ca25-409d-4aba-8db7-6401bc56b874" (UID: "0c78ca25-409d-4aba-8db7-6401bc56b874"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.095573 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config" (OuterVolumeSpecName: "config") pod "0c78ca25-409d-4aba-8db7-6401bc56b874" (UID: "0c78ca25-409d-4aba-8db7-6401bc56b874"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.101932 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p" (OuterVolumeSpecName: "kube-api-access-8mt2p") pod "0c78ca25-409d-4aba-8db7-6401bc56b874" (UID: "0c78ca25-409d-4aba-8db7-6401bc56b874"). InnerVolumeSpecName "kube-api-access-8mt2p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.103333 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0c78ca25-409d-4aba-8db7-6401bc56b874" (UID: "0c78ca25-409d-4aba-8db7-6401bc56b874"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195412 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8604c68-c614-41af-83af-c59ed68d5937-serving-cert\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195481 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp6n8\" (UniqueName: \"kubernetes.io/projected/e8604c68-c614-41af-83af-c59ed68d5937-kube-api-access-kp6n8\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195522 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-client-ca\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195567 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-proxy-ca-bundles\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195590 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-config\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195638 4879 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c78ca25-409d-4aba-8db7-6401bc56b874-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195653 4879 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195664 4879 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195676 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0c78ca25-409d-4aba-8db7-6401bc56b874-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.195687 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mt2p\" (UniqueName: \"kubernetes.io/projected/0c78ca25-409d-4aba-8db7-6401bc56b874-kube-api-access-8mt2p\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.196724 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-client-ca\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.197194 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-proxy-ca-bundles\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.197733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8604c68-c614-41af-83af-c59ed68d5937-config\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.199152 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8604c68-c614-41af-83af-c59ed68d5937-serving-cert\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.209850 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp6n8\" (UniqueName: \"kubernetes.io/projected/e8604c68-c614-41af-83af-c59ed68d5937-kube-api-access-kp6n8\") pod \"controller-manager-58c566f7fb-779bj\" (UID: \"e8604c68-c614-41af-83af-c59ed68d5937\") " pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.301006 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.502657 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58c566f7fb-779bj"] Nov 25 14:37:42 crc kubenswrapper[4879]: W1125 14:37:42.506442 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8604c68_c614_41af_83af_c59ed68d5937.slice/crio-e7d5476d117702929f4dbbce0065b883fc3811c1c507528332aa2ebf76035e0f WatchSource:0}: Error finding container e7d5476d117702929f4dbbce0065b883fc3811c1c507528332aa2ebf76035e0f: Status 404 returned error can't find the container with id e7d5476d117702929f4dbbce0065b883fc3811c1c507528332aa2ebf76035e0f Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.641732 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c78ca25-409d-4aba-8db7-6401bc56b874" containerID="dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd" exitCode=0 Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.641803 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" event={"ID":"0c78ca25-409d-4aba-8db7-6401bc56b874","Type":"ContainerDied","Data":"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd"} Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.641821 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.642178 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc75f4fc-64h87" event={"ID":"0c78ca25-409d-4aba-8db7-6401bc56b874","Type":"ContainerDied","Data":"db54e06440865eb17b8b74b84fd23d8b3bc8734d2f335cfbd940efad8bdd4199"} Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.642207 4879 scope.go:117] "RemoveContainer" containerID="dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.643993 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" event={"ID":"e8604c68-c614-41af-83af-c59ed68d5937","Type":"ContainerStarted","Data":"e7d5476d117702929f4dbbce0065b883fc3811c1c507528332aa2ebf76035e0f"} Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.659320 4879 scope.go:117] "RemoveContainer" containerID="dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd" Nov 25 14:37:42 crc kubenswrapper[4879]: E1125 14:37:42.659775 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd\": container with ID starting with dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd not found: ID does not exist" containerID="dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.659844 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd"} err="failed to get container status \"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd\": rpc error: code = NotFound desc = could not find container 
\"dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd\": container with ID starting with dc4316fdbfa5ac3437c598b91eca9723fc15c20e5cb03aa2023a1714df4abfcd not found: ID does not exist" Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.690056 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:42 crc kubenswrapper[4879]: I1125 14:37:42.693668 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-dc75f4fc-64h87"] Nov 25 14:37:43 crc kubenswrapper[4879]: I1125 14:37:43.654584 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c78ca25-409d-4aba-8db7-6401bc56b874" path="/var/lib/kubelet/pods/0c78ca25-409d-4aba-8db7-6401bc56b874/volumes" Nov 25 14:37:43 crc kubenswrapper[4879]: I1125 14:37:43.655036 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" event={"ID":"e8604c68-c614-41af-83af-c59ed68d5937","Type":"ContainerStarted","Data":"2a963052658e33080f8da5fd07619b82f5cebc2f0e9ac78507156b3a9c6d7c66"} Nov 25 14:37:43 crc kubenswrapper[4879]: I1125 14:37:43.673653 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" podStartSLOduration=3.673631802 podStartE2EDuration="3.673631802s" podCreationTimestamp="2025-11-25 14:37:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:37:43.671560515 +0000 UTC m=+755.274973596" watchObservedRunningTime="2025-11-25 14:37:43.673631802 +0000 UTC m=+755.277044873" Nov 25 14:37:44 crc kubenswrapper[4879]: I1125 14:37:44.085218 4879 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 25 14:37:44 crc kubenswrapper[4879]: I1125 14:37:44.659524 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:44 crc kubenswrapper[4879]: I1125 14:37:44.664396 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58c566f7fb-779bj" Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.779789 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc"] Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.780869 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.782983 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.790348 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc"] Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.947348 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.947455 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:45 crc kubenswrapper[4879]: I1125 14:37:45.947483 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79k6n\" (UniqueName: \"kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.048341 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.048448 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.048827 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.048869 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util\") pod 
\"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.048484 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79k6n\" (UniqueName: \"kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.065600 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79k6n\" (UniqueName: \"kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.097526 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.529861 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc"] Nov 25 14:37:46 crc kubenswrapper[4879]: W1125 14:37:46.540649 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c4acdfd_e627_4666_b24c_51cf346e3757.slice/crio-3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d WatchSource:0}: Error finding container 3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d: Status 404 returned error can't find the container with id 3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d Nov 25 14:37:46 crc kubenswrapper[4879]: I1125 14:37:46.670370 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" event={"ID":"6c4acdfd-e627-4666-b24c-51cf346e3757","Type":"ContainerStarted","Data":"3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d"} Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.409367 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.409434 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.539944 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.541099 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.550978 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.675565 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.675616 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.675634 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxgdf\" (UniqueName: \"kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.677645 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerID="c90b1df78d585c270ca946422f28532dd1897bf359ad2ddba58a35d08490225b" exitCode=0 Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.677681 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" event={"ID":"6c4acdfd-e627-4666-b24c-51cf346e3757","Type":"ContainerDied","Data":"c90b1df78d585c270ca946422f28532dd1897bf359ad2ddba58a35d08490225b"} Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.777379 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.777477 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.777504 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxgdf\" (UniqueName: \"kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.777984 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content\") pod \"redhat-operators-cntsw\" (UID: 
\"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.778239 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.814881 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxgdf\" (UniqueName: \"kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf\") pod \"redhat-operators-cntsw\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:47 crc kubenswrapper[4879]: I1125 14:37:47.886513 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:48 crc kubenswrapper[4879]: I1125 14:37:48.287851 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:37:48 crc kubenswrapper[4879]: W1125 14:37:48.294318 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd96f7746_f239_4b74_843d_626d5c343edd.slice/crio-db350a4cdd8025b454bfcb94fe9c7a8d89812cff090f2afb500f1b51eb75da57 WatchSource:0}: Error finding container db350a4cdd8025b454bfcb94fe9c7a8d89812cff090f2afb500f1b51eb75da57: Status 404 returned error can't find the container with id db350a4cdd8025b454bfcb94fe9c7a8d89812cff090f2afb500f1b51eb75da57 Nov 25 14:37:48 crc kubenswrapper[4879]: I1125 14:37:48.684055 4879 generic.go:334] "Generic (PLEG): container finished" podID="d96f7746-f239-4b74-843d-626d5c343edd" containerID="7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90" exitCode=0 Nov 25 14:37:48 crc kubenswrapper[4879]: I1125 14:37:48.684148 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerDied","Data":"7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90"} Nov 25 14:37:48 crc kubenswrapper[4879]: I1125 14:37:48.684381 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerStarted","Data":"db350a4cdd8025b454bfcb94fe9c7a8d89812cff090f2afb500f1b51eb75da57"} Nov 25 14:37:49 crc kubenswrapper[4879]: I1125 14:37:49.691915 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerID="648ba2762dac0ec36905775a850cd9ec2b2af3fd1f8dba8935fb99a236b9f325" exitCode=0 Nov 25 14:37:49 crc kubenswrapper[4879]: I1125 14:37:49.691987 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" event={"ID":"6c4acdfd-e627-4666-b24c-51cf346e3757","Type":"ContainerDied","Data":"648ba2762dac0ec36905775a850cd9ec2b2af3fd1f8dba8935fb99a236b9f325"} Nov 25 14:37:50 crc kubenswrapper[4879]: I1125 14:37:50.699395 4879 generic.go:334] "Generic (PLEG): container finished" podID="d96f7746-f239-4b74-843d-626d5c343edd" containerID="6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3" exitCode=0 Nov 25 14:37:50 crc 
kubenswrapper[4879]: I1125 14:37:50.699459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerDied","Data":"6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3"} Nov 25 14:37:50 crc kubenswrapper[4879]: I1125 14:37:50.702345 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerID="9eb47d627173b26086292d02ac6cd3f4c84d1d806cf3e278104969aceb56eae5" exitCode=0 Nov 25 14:37:50 crc kubenswrapper[4879]: I1125 14:37:50.702541 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" event={"ID":"6c4acdfd-e627-4666-b24c-51cf346e3757","Type":"ContainerDied","Data":"9eb47d627173b26086292d02ac6cd3f4c84d1d806cf3e278104969aceb56eae5"} Nov 25 14:37:51 crc kubenswrapper[4879]: I1125 14:37:51.712707 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerStarted","Data":"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de"} Nov 25 14:37:51 crc kubenswrapper[4879]: I1125 14:37:51.734444 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cntsw" podStartSLOduration=2.08069396 podStartE2EDuration="4.734424448s" podCreationTimestamp="2025-11-25 14:37:47 +0000 UTC" firstStartedPulling="2025-11-25 14:37:48.685443815 +0000 UTC m=+760.288856886" lastFinishedPulling="2025-11-25 14:37:51.339174303 +0000 UTC m=+762.942587374" observedRunningTime="2025-11-25 14:37:51.731783796 +0000 UTC m=+763.335196887" watchObservedRunningTime="2025-11-25 14:37:51.734424448 +0000 UTC m=+763.337837519" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.079565 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.234372 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79k6n\" (UniqueName: \"kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n\") pod \"6c4acdfd-e627-4666-b24c-51cf346e3757\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.234539 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util\") pod \"6c4acdfd-e627-4666-b24c-51cf346e3757\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.234577 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle\") pod \"6c4acdfd-e627-4666-b24c-51cf346e3757\" (UID: \"6c4acdfd-e627-4666-b24c-51cf346e3757\") " Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.235201 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle" (OuterVolumeSpecName: "bundle") pod "6c4acdfd-e627-4666-b24c-51cf346e3757" (UID: "6c4acdfd-e627-4666-b24c-51cf346e3757"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.239089 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n" (OuterVolumeSpecName: "kube-api-access-79k6n") pod "6c4acdfd-e627-4666-b24c-51cf346e3757" (UID: "6c4acdfd-e627-4666-b24c-51cf346e3757"). InnerVolumeSpecName "kube-api-access-79k6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.244180 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util" (OuterVolumeSpecName: "util") pod "6c4acdfd-e627-4666-b24c-51cf346e3757" (UID: "6c4acdfd-e627-4666-b24c-51cf346e3757"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.335952 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79k6n\" (UniqueName: \"kubernetes.io/projected/6c4acdfd-e627-4666-b24c-51cf346e3757-kube-api-access-79k6n\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.335986 4879 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-util\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.335995 4879 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c4acdfd-e627-4666-b24c-51cf346e3757-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.721676 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" event={"ID":"6c4acdfd-e627-4666-b24c-51cf346e3757","Type":"ContainerDied","Data":"3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d"} Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.722290 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d5012e46c4f06c296c6c320b496d54db919bece9043ca8b01425825ce281a1d" Nov 25 14:37:52 crc kubenswrapper[4879]: I1125 14:37:52.721721 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.316835 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-cx9pc"] Nov 25 14:37:57 crc kubenswrapper[4879]: E1125 14:37:57.317550 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="extract" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.317566 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="extract" Nov 25 14:37:57 crc kubenswrapper[4879]: E1125 14:37:57.317606 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="pull" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.317613 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="pull" Nov 25 14:37:57 crc kubenswrapper[4879]: E1125 14:37:57.317623 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="util" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.317632 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="util" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.317728 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c4acdfd-e627-4666-b24c-51cf346e3757" containerName="extract" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.318245 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.320968 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.321591 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.321637 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-hmphj" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.327679 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-cx9pc"] Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.499628 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvkx8\" (UniqueName: \"kubernetes.io/projected/547de8cf-5f37-47e7-80a8-c83f0f0ce36f-kube-api-access-jvkx8\") pod \"nmstate-operator-557fdffb88-cx9pc\" (UID: \"547de8cf-5f37-47e7-80a8-c83f0f0ce36f\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.601789 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvkx8\" (UniqueName: \"kubernetes.io/projected/547de8cf-5f37-47e7-80a8-c83f0f0ce36f-kube-api-access-jvkx8\") pod \"nmstate-operator-557fdffb88-cx9pc\" (UID: \"547de8cf-5f37-47e7-80a8-c83f0f0ce36f\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.630253 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvkx8\" 
(UniqueName: \"kubernetes.io/projected/547de8cf-5f37-47e7-80a8-c83f0f0ce36f-kube-api-access-jvkx8\") pod \"nmstate-operator-557fdffb88-cx9pc\" (UID: \"547de8cf-5f37-47e7-80a8-c83f0f0ce36f\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.638910 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.887384 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.887844 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:57 crc kubenswrapper[4879]: I1125 14:37:57.925867 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:37:58 crc kubenswrapper[4879]: I1125 14:37:58.098499 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-cx9pc"] Nov 25 14:37:58 crc kubenswrapper[4879]: I1125 14:37:58.755103 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" event={"ID":"547de8cf-5f37-47e7-80a8-c83f0f0ce36f","Type":"ContainerStarted","Data":"1ae9b890aac8d62677b327aea496a89ca81191307936fc272a2e5a449f906e4b"} Nov 25 14:37:58 crc kubenswrapper[4879]: I1125 14:37:58.794341 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:38:00 crc kubenswrapper[4879]: I1125 14:38:00.731439 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:38:01 crc kubenswrapper[4879]: I1125 14:38:01.772958 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cntsw" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="registry-server" containerID="cri-o://1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de" gracePeriod=2 Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.262675 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.270460 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content\") pod \"d96f7746-f239-4b74-843d-626d5c343edd\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.270568 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities\") pod \"d96f7746-f239-4b74-843d-626d5c343edd\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.270627 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxgdf\" (UniqueName: \"kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf\") pod \"d96f7746-f239-4b74-843d-626d5c343edd\" (UID: \"d96f7746-f239-4b74-843d-626d5c343edd\") " Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.272962 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities" (OuterVolumeSpecName: "utilities") pod "d96f7746-f239-4b74-843d-626d5c343edd" (UID: "d96f7746-f239-4b74-843d-626d5c343edd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.277046 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf" (OuterVolumeSpecName: "kube-api-access-pxgdf") pod "d96f7746-f239-4b74-843d-626d5c343edd" (UID: "d96f7746-f239-4b74-843d-626d5c343edd"). InnerVolumeSpecName "kube-api-access-pxgdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.372430 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.372470 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxgdf\" (UniqueName: \"kubernetes.io/projected/d96f7746-f239-4b74-843d-626d5c343edd-kube-api-access-pxgdf\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.576525 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d96f7746-f239-4b74-843d-626d5c343edd" (UID: "d96f7746-f239-4b74-843d-626d5c343edd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.677279 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d96f7746-f239-4b74-843d-626d5c343edd-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.782870 4879 generic.go:334] "Generic (PLEG): container finished" podID="d96f7746-f239-4b74-843d-626d5c343edd" containerID="1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de" exitCode=0 Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.782940 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerDied","Data":"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de"} Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.783015 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cntsw" event={"ID":"d96f7746-f239-4b74-843d-626d5c343edd","Type":"ContainerDied","Data":"db350a4cdd8025b454bfcb94fe9c7a8d89812cff090f2afb500f1b51eb75da57"} Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.782960 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cntsw" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.783044 4879 scope.go:117] "RemoveContainer" containerID="1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.786341 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" event={"ID":"547de8cf-5f37-47e7-80a8-c83f0f0ce36f","Type":"ContainerStarted","Data":"b3087ee39902504cf5a2f1b6619d9d7395f0694d1a8299ca3ba7164a85691a90"} Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.815847 4879 scope.go:117] "RemoveContainer" containerID="6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.816344 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-557fdffb88-cx9pc" podStartSLOduration=2.363405019 podStartE2EDuration="5.816326302s" podCreationTimestamp="2025-11-25 14:37:57 +0000 UTC" firstStartedPulling="2025-11-25 14:37:58.114549971 +0000 UTC m=+769.717963042" lastFinishedPulling="2025-11-25 14:38:01.567471254 +0000 UTC m=+773.170884325" observedRunningTime="2025-11-25 14:38:02.814861512 +0000 UTC m=+774.418274603" watchObservedRunningTime="2025-11-25 14:38:02.816326302 +0000 UTC m=+774.419739373" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.833573 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.836381 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cntsw"] Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.852367 4879 scope.go:117] "RemoveContainer" containerID="7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.873689 4879 scope.go:117] "RemoveContainer" containerID="1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de" Nov 25 14:38:02 crc kubenswrapper[4879]: E1125 14:38:02.876475 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de\": container with ID starting with 1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de not found: ID does not exist" containerID="1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.876512 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de"} err="failed to get container status \"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de\": rpc error: code = NotFound desc = could not find container \"1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de\": container with ID starting with 1d80f30421ea84777304b7118629716d693dbdd4e3d62dada8247295020df1de not found: ID does not exist" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.876538 4879 scope.go:117] "RemoveContainer" containerID="6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3" Nov 25 14:38:02 crc kubenswrapper[4879]: E1125 14:38:02.877013 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3\": container with ID starting with 6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3 not found: ID does not exist" containerID="6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.877086 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3"} err="failed to get container status \"6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3\": rpc error: code = NotFound desc = could not find container \"6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3\": container with ID starting with 6c6f1db8da7ee0aa66aad95ab6668de90ece21469b9b42abf595d40aae39f7b3 not found: ID does not exist" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.877145 4879 scope.go:117] "RemoveContainer" containerID="7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90" Nov 25 14:38:02 crc kubenswrapper[4879]: E1125 14:38:02.877580 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90\": container with ID starting with 7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90 not found: ID does not exist" containerID="7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90" Nov 25 14:38:02 crc kubenswrapper[4879]: I1125 14:38:02.877610 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90"} err="failed to get container status \"7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90\": rpc error: code = NotFound desc = could not find container \"7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90\": container with ID starting with 7dcce138982d674d43ca4b0969c3cc203107865d0dc1ca07793e8544e1a27d90 not found: ID does not exist" Nov 25 14:38:03 crc kubenswrapper[4879]: I1125 14:38:03.650254 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d96f7746-f239-4b74-843d-626d5c343edd" path="/var/lib/kubelet/pods/d96f7746-f239-4b74-843d-626d5c343edd/volumes" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.354961 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw"] Nov 25 14:38:06 crc kubenswrapper[4879]: E1125 14:38:06.355556 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="registry-server" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.355571 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="registry-server" Nov 25 14:38:06 crc kubenswrapper[4879]: E1125 14:38:06.355584 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="extract-utilities" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.355593 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="extract-utilities" Nov 25 14:38:06 crc kubenswrapper[4879]: E1125 14:38:06.355607 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="extract-content" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.355615 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="extract-content" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.355730 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96f7746-f239-4b74-843d-626d5c343edd" containerName="registry-server" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.356409 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.358012 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zxms2" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.373962 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.378422 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.384862 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.396862 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.404090 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.420814 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-f4xpk"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.421509 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.497673 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.498635 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.500918 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.501094 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fnz84" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.501141 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.518377 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519726 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519786 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a7865254-ee4c-44ea-947e-b126835519db-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519817 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8nkq\" (UniqueName: \"kubernetes.io/projected/a7865254-ee4c-44ea-947e-b126835519db-kube-api-access-b8nkq\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519851 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j6s5\" (UniqueName: \"kubernetes.io/projected/13269f38-1cf7-42da-b5b9-7b61d2693aeb-kube-api-access-8j6s5\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519893 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmvkl\" (UniqueName: \"kubernetes.io/projected/19a3b86c-ed4d-436a-a58d-cd7027490ce0-kube-api-access-dmvkl\") pod \"nmstate-metrics-5dcf9c57c5-9g2fw\" (UID: \"19a3b86c-ed4d-436a-a58d-cd7027490ce0\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.519924 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: 
\"kubernetes.io/secret/13269f38-1cf7-42da-b5b9-7b61d2693aeb-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621422 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-dbus-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621519 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a7865254-ee4c-44ea-947e-b126835519db-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621648 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-nmstate-lock\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621717 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8nkq\" (UniqueName: \"kubernetes.io/projected/a7865254-ee4c-44ea-947e-b126835519db-kube-api-access-b8nkq\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621858 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j6s5\" (UniqueName: \"kubernetes.io/projected/13269f38-1cf7-42da-b5b9-7b61d2693aeb-kube-api-access-8j6s5\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.621974 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmvkl\" (UniqueName: \"kubernetes.io/projected/19a3b86c-ed4d-436a-a58d-cd7027490ce0-kube-api-access-dmvkl\") pod \"nmstate-metrics-5dcf9c57c5-9g2fw\" (UID: \"19a3b86c-ed4d-436a-a58d-cd7027490ce0\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.622036 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-ovs-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.622092 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/13269f38-1cf7-42da-b5b9-7b61d2693aeb-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.622176 
4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfn2v\" (UniqueName: \"kubernetes.io/projected/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-kube-api-access-nfn2v\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.622238 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: E1125 14:38:06.622395 4879 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Nov 25 14:38:06 crc kubenswrapper[4879]: E1125 14:38:06.622453 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert podName:a7865254-ee4c-44ea-947e-b126835519db nodeName:}" failed. No retries permitted until 2025-11-25 14:38:07.122435839 +0000 UTC m=+778.725848910 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert") pod "nmstate-console-plugin-5874bd7bc5-4wlrq" (UID: "a7865254-ee4c-44ea-947e-b126835519db") : secret "plugin-serving-cert" not found Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.622582 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a7865254-ee4c-44ea-947e-b126835519db-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.637700 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/13269f38-1cf7-42da-b5b9-7b61d2693aeb-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.657223 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j6s5\" (UniqueName: \"kubernetes.io/projected/13269f38-1cf7-42da-b5b9-7b61d2693aeb-kube-api-access-8j6s5\") pod \"nmstate-webhook-6b89b748d8-mxnc7\" (UID: \"13269f38-1cf7-42da-b5b9-7b61d2693aeb\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.658108 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8nkq\" (UniqueName: \"kubernetes.io/projected/a7865254-ee4c-44ea-947e-b126835519db-kube-api-access-b8nkq\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.660078 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmvkl\" (UniqueName: \"kubernetes.io/projected/19a3b86c-ed4d-436a-a58d-cd7027490ce0-kube-api-access-dmvkl\") pod 
\"nmstate-metrics-5dcf9c57c5-9g2fw\" (UID: \"19a3b86c-ed4d-436a-a58d-cd7027490ce0\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.685625 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.701537 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6bb5cd776f-svswp"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.702462 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.719960 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.722880 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-serving-cert\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.722912 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.722928 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-service-ca\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.722958 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfn2v\" (UniqueName: \"kubernetes.io/projected/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-kube-api-access-nfn2v\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.722995 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-dbus-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723017 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-nmstate-lock\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723049 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-oauth-serving-cert\") pod 
\"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723100 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-oauth-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723115 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dh4k\" (UniqueName: \"kubernetes.io/projected/1cf3f5fd-723d-4cc4-9456-b77b40f25752-kube-api-access-6dh4k\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723147 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-ovs-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723163 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-trusted-ca-bundle\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723808 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-dbus-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723851 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-nmstate-lock\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.723888 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-ovs-socket\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.740904 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6bb5cd776f-svswp"] Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.749778 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfn2v\" (UniqueName: \"kubernetes.io/projected/1b17dab8-ef46-4084-b1f2-8a5e35c2a73c-kube-api-access-nfn2v\") pod \"nmstate-handler-f4xpk\" (UID: \"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c\") " pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826296 4879 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-oauth-serving-cert\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826652 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-oauth-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826672 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dh4k\" (UniqueName: \"kubernetes.io/projected/1cf3f5fd-723d-4cc4-9456-b77b40f25752-kube-api-access-6dh4k\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826699 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-trusted-ca-bundle\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826726 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-serving-cert\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826745 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.826766 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-service-ca\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.828056 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-service-ca\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.828100 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-oauth-serving-cert\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.828768 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" 
(UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.829993 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cf3f5fd-723d-4cc4-9456-b77b40f25752-trusted-ca-bundle\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.831059 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-oauth-config\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.833922 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1cf3f5fd-723d-4cc4-9456-b77b40f25752-console-serving-cert\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:06 crc kubenswrapper[4879]: I1125 14:38:06.847495 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dh4k\" (UniqueName: \"kubernetes.io/projected/1cf3f5fd-723d-4cc4-9456-b77b40f25752-kube-api-access-6dh4k\") pod \"console-6bb5cd776f-svswp\" (UID: \"1cf3f5fd-723d-4cc4-9456-b77b40f25752\") " pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.041640 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.059132 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.130790 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.134507 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865254-ee4c-44ea-947e-b126835519db-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-4wlrq\" (UID: \"a7865254-ee4c-44ea-947e-b126835519db\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.145592 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw"] Nov 25 14:38:07 crc kubenswrapper[4879]: W1125 14:38:07.153130 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19a3b86c_ed4d_436a_a58d_cd7027490ce0.slice/crio-fa1385894dbeff7c0e9aca654610d92ba6c34b1b6e70eb8d19eaf345d2db42fb WatchSource:0}: Error finding container fa1385894dbeff7c0e9aca654610d92ba6c34b1b6e70eb8d19eaf345d2db42fb: Status 404 returned error can't find the container with id fa1385894dbeff7c0e9aca654610d92ba6c34b1b6e70eb8d19eaf345d2db42fb Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.204449 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7"] Nov 25 14:38:07 crc kubenswrapper[4879]: W1125 14:38:07.211609 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13269f38_1cf7_42da_b5b9_7b61d2693aeb.slice/crio-0a0573759dd5ba4310f33320e213c5e168b1a01325108196ca6925c878c8f32e WatchSource:0}: Error finding container 0a0573759dd5ba4310f33320e213c5e168b1a01325108196ca6925c878c8f32e: Status 404 returned error can't find the container with id 0a0573759dd5ba4310f33320e213c5e168b1a01325108196ca6925c878c8f32e Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.419078 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.440532 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6bb5cd776f-svswp"] Nov 25 14:38:07 crc kubenswrapper[4879]: W1125 14:38:07.448600 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cf3f5fd_723d_4cc4_9456_b77b40f25752.slice/crio-c9d29a686c7600e99940c7af099d2e765e4d06e911df5073a6986944b3f4cc80 WatchSource:0}: Error finding container c9d29a686c7600e99940c7af099d2e765e4d06e911df5073a6986944b3f4cc80: Status 404 returned error can't find the container with id c9d29a686c7600e99940c7af099d2e765e4d06e911df5073a6986944b3f4cc80 Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.829024 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" event={"ID":"13269f38-1cf7-42da-b5b9-7b61d2693aeb","Type":"ContainerStarted","Data":"0a0573759dd5ba4310f33320e213c5e168b1a01325108196ca6925c878c8f32e"} Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.830928 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6bb5cd776f-svswp" event={"ID":"1cf3f5fd-723d-4cc4-9456-b77b40f25752","Type":"ContainerStarted","Data":"0154659d692137b0f40f288d00fccee709af883a61df6fe32737a0fde0e054cb"} Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.830965 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6bb5cd776f-svswp" event={"ID":"1cf3f5fd-723d-4cc4-9456-b77b40f25752","Type":"ContainerStarted","Data":"c9d29a686c7600e99940c7af099d2e765e4d06e911df5073a6986944b3f4cc80"} Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.832036 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" event={"ID":"19a3b86c-ed4d-436a-a58d-cd7027490ce0","Type":"ContainerStarted","Data":"fa1385894dbeff7c0e9aca654610d92ba6c34b1b6e70eb8d19eaf345d2db42fb"} Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.833571 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-f4xpk" event={"ID":"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c","Type":"ContainerStarted","Data":"e5200212b62625fa15b813aa58186b86696b81ba74fd1496c88b00a755342f64"} Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.861460 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6bb5cd776f-svswp" podStartSLOduration=1.861437972 podStartE2EDuration="1.861437972s" podCreationTimestamp="2025-11-25 14:38:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:38:07.853554534 +0000 UTC m=+779.456967625" watchObservedRunningTime="2025-11-25 14:38:07.861437972 +0000 UTC m=+779.464851043" Nov 25 14:38:07 crc kubenswrapper[4879]: I1125 14:38:07.873879 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq"] Nov 25 14:38:07 crc kubenswrapper[4879]: W1125 14:38:07.879615 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7865254_ee4c_44ea_947e_b126835519db.slice/crio-6304db471a1aa507a68374cfee786e447254eaa2f517a534002b1b240fe24984 WatchSource:0}: Error finding container 
6304db471a1aa507a68374cfee786e447254eaa2f517a534002b1b240fe24984: Status 404 returned error can't find the container with id 6304db471a1aa507a68374cfee786e447254eaa2f517a534002b1b240fe24984 Nov 25 14:38:08 crc kubenswrapper[4879]: I1125 14:38:08.841829 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" event={"ID":"a7865254-ee4c-44ea-947e-b126835519db","Type":"ContainerStarted","Data":"6304db471a1aa507a68374cfee786e447254eaa2f517a534002b1b240fe24984"} Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.873397 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-f4xpk" event={"ID":"1b17dab8-ef46-4084-b1f2-8a5e35c2a73c","Type":"ContainerStarted","Data":"b2026c6aced00e7c1eb1f4cf756f7d60572e9b08ec781768d0d940256a37a86c"} Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.874208 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.875632 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" event={"ID":"13269f38-1cf7-42da-b5b9-7b61d2693aeb","Type":"ContainerStarted","Data":"94865e5ed66adcccd7b116f0a64b6614bb6a19ba048f5f58efe495abc2e152f7"} Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.875850 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.877497 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" event={"ID":"19a3b86c-ed4d-436a-a58d-cd7027490ce0","Type":"ContainerStarted","Data":"1208fa3c76bbf6fa5dbe6c82963f969751808c0351bfd9e9dd4c70601658bef0"} Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.879229 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" event={"ID":"a7865254-ee4c-44ea-947e-b126835519db","Type":"ContainerStarted","Data":"93b3ce2ccd2d7fe1487e3f1c172f19d011587a457f279d1d0df7ac4d17286f48"} Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.898176 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-f4xpk" podStartSLOduration=1.894554254 podStartE2EDuration="6.898158663s" podCreationTimestamp="2025-11-25 14:38:06 +0000 UTC" firstStartedPulling="2025-11-25 14:38:07.063150547 +0000 UTC m=+778.666563618" lastFinishedPulling="2025-11-25 14:38:12.066754956 +0000 UTC m=+783.670168027" observedRunningTime="2025-11-25 14:38:12.892827819 +0000 UTC m=+784.496240880" watchObservedRunningTime="2025-11-25 14:38:12.898158663 +0000 UTC m=+784.501571734" Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.909047 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-4wlrq" podStartSLOduration=2.7164112449999998 podStartE2EDuration="6.909023445s" podCreationTimestamp="2025-11-25 14:38:06 +0000 UTC" firstStartedPulling="2025-11-25 14:38:07.881784076 +0000 UTC m=+779.485197147" lastFinishedPulling="2025-11-25 14:38:12.074396256 +0000 UTC m=+783.677809347" observedRunningTime="2025-11-25 14:38:12.907703207 +0000 UTC m=+784.511116308" watchObservedRunningTime="2025-11-25 14:38:12.909023445 +0000 UTC m=+784.512436516" Nov 25 14:38:12 crc kubenswrapper[4879]: I1125 14:38:12.928437 4879 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" podStartSLOduration=2.075173411 podStartE2EDuration="6.928419384s" podCreationTimestamp="2025-11-25 14:38:06 +0000 UTC" firstStartedPulling="2025-11-25 14:38:07.213392289 +0000 UTC m=+778.816805360" lastFinishedPulling="2025-11-25 14:38:12.066638222 +0000 UTC m=+783.670051333" observedRunningTime="2025-11-25 14:38:12.927587029 +0000 UTC m=+784.531000090" watchObservedRunningTime="2025-11-25 14:38:12.928419384 +0000 UTC m=+784.531832445" Nov 25 14:38:15 crc kubenswrapper[4879]: I1125 14:38:15.903878 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" event={"ID":"19a3b86c-ed4d-436a-a58d-cd7027490ce0","Type":"ContainerStarted","Data":"41724ce8409b7ee028fbc6bf6fbf9a64c56a86f9c6992140745b615c717e9b63"} Nov 25 14:38:15 crc kubenswrapper[4879]: I1125 14:38:15.933382 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-9g2fw" podStartSLOduration=1.922779486 podStartE2EDuration="9.933350186s" podCreationTimestamp="2025-11-25 14:38:06 +0000 UTC" firstStartedPulling="2025-11-25 14:38:07.15534925 +0000 UTC m=+778.758762321" lastFinishedPulling="2025-11-25 14:38:15.16591981 +0000 UTC m=+786.769333021" observedRunningTime="2025-11-25 14:38:15.92061066 +0000 UTC m=+787.524023761" watchObservedRunningTime="2025-11-25 14:38:15.933350186 +0000 UTC m=+787.536763277" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.063570 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.064197 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.072790 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-f4xpk" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.073835 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.409452 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.409550 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.926807 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6bb5cd776f-svswp" Nov 25 14:38:17 crc kubenswrapper[4879]: I1125 14:38:17.997541 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:38:26 crc kubenswrapper[4879]: I1125 14:38:26.727349 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-nmstate/nmstate-webhook-6b89b748d8-mxnc7" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.056035 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr"] Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.058429 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.061774 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.062191 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr"] Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.172273 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdh7h\" (UniqueName: \"kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.172341 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.172480 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.273659 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.273809 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.273876 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdh7h\" (UniqueName: \"kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h\") pod 
\"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.274700 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.274761 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.297583 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdh7h\" (UniqueName: \"kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.380768 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:38:39 crc kubenswrapper[4879]: I1125 14:38:39.851741 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr"] Nov 25 14:38:40 crc kubenswrapper[4879]: I1125 14:38:40.045376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerStarted","Data":"4b662e7c85f31583e41de40b7b5dbbeed93a930af2d4ac50b7df8a38cf71b4a1"} Nov 25 14:38:43 crc kubenswrapper[4879]: I1125 14:38:43.053664 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-r4d2h" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" containerID="cri-o://f3d352a9412850e5e9af693db40c56ed6a8d9bd30d0f2983ad3410383f6c13ad" gracePeriod=15 Nov 25 14:38:47 crc kubenswrapper[4879]: I1125 14:38:47.409214 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:38:47 crc kubenswrapper[4879]: I1125 14:38:47.410028 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:38:47 crc kubenswrapper[4879]: I1125 14:38:47.410359 4879 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:38:47 crc kubenswrapper[4879]: I1125 14:38:47.411532 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:38:47 crc kubenswrapper[4879]: I1125 14:38:47.411680 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2" gracePeriod=600 Nov 25 14:38:49 crc kubenswrapper[4879]: I1125 14:38:49.800323 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r4d2h_f601aa7e-7179-4ce4-a83f-aa0c324970e0/console/0.log" Nov 25 14:38:49 crc kubenswrapper[4879]: I1125 14:38:49.800713 4879 generic.go:334] "Generic (PLEG): container finished" podID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerID="f3d352a9412850e5e9af693db40c56ed6a8d9bd30d0f2983ad3410383f6c13ad" exitCode=2 Nov 25 14:38:49 crc kubenswrapper[4879]: I1125 14:38:49.800755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4d2h" event={"ID":"f601aa7e-7179-4ce4-a83f-aa0c324970e0","Type":"ContainerDied","Data":"f3d352a9412850e5e9af693db40c56ed6a8d9bd30d0f2983ad3410383f6c13ad"} Nov 25 14:38:50 crc kubenswrapper[4879]: I1125 14:38:50.807059 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerStarted","Data":"1528d107384a896201d869992d3c869e3c5d52be0599bf796decf9739886c608"} Nov 25 14:38:52 crc kubenswrapper[4879]: I1125 14:38:52.133882 4879 patch_prober.go:28] interesting pod/console-f9d7485db-r4d2h container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 25 14:38:52 crc kubenswrapper[4879]: I1125 14:38:52.133964 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-r4d2h" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.279032 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.281114 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.290674 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.308073 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r4d2h_f601aa7e-7179-4ce4-a83f-aa0c324970e0/console/0.log" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.308201 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420601 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prmb4\" (UniqueName: \"kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420705 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420763 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420801 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420862 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420929 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.420962 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle\") pod \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\" (UID: \"f601aa7e-7179-4ce4-a83f-aa0c324970e0\") " Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421161 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c2bv\" (UniqueName: \"kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " 
pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421199 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421224 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421306 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config" (OuterVolumeSpecName: "console-config") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421539 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca" (OuterVolumeSpecName: "service-ca") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421583 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.421603 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.425924 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.426162 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4" (OuterVolumeSpecName: "kube-api-access-prmb4") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "kube-api-access-prmb4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.426272 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "f601aa7e-7179-4ce4-a83f-aa0c324970e0" (UID: "f601aa7e-7179-4ce4-a83f-aa0c324970e0"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522531 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c2bv\" (UniqueName: \"kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522604 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522631 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522678 4879 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522691 4879 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522701 4879 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522710 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prmb4\" (UniqueName: \"kubernetes.io/projected/f601aa7e-7179-4ce4-a83f-aa0c324970e0-kube-api-access-prmb4\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522853 4879 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522869 4879 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.522880 4879 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f601aa7e-7179-4ce4-a83f-aa0c324970e0-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.523429 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.523864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.542977 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c2bv\" (UniqueName: \"kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv\") pod \"redhat-marketplace-vc6vx\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.618155 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:38:53 crc kubenswrapper[4879]: I1125 14:38:53.840179 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:38:54 crc kubenswrapper[4879]: I1125 14:38:54.531940 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2" exitCode=0 Nov 25 14:38:54 crc kubenswrapper[4879]: I1125 14:38:54.532030 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2"} Nov 25 14:38:54 crc kubenswrapper[4879]: I1125 14:38:54.533104 4879 scope.go:117] "RemoveContainer" containerID="15edc185488ff8f343000ea9a8092607411e7b2b070d2b6e62ac471cd7e356eb" Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.541810 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-r4d2h_f601aa7e-7179-4ce4-a83f-aa0c324970e0/console/0.log" Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.541949 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4d2h" event={"ID":"f601aa7e-7179-4ce4-a83f-aa0c324970e0","Type":"ContainerDied","Data":"3199c3dc60aad498518e34db8e5a84727e2871e4a6a8ffd200ecbd97b03da752"} Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.542043 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-r4d2h" Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.544026 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerStarted","Data":"aa58d74955be64509f0f53039bdc850e4df9af8c6decb578542e2a40e5144954"} Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.568735 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.575709 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-r4d2h"] Nov 25 14:38:55 crc kubenswrapper[4879]: I1125 14:38:55.657099 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" path="/var/lib/kubelet/pods/f601aa7e-7179-4ce4-a83f-aa0c324970e0/volumes" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.467550 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:38:56 crc kubenswrapper[4879]: E1125 14:38:56.468007 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.468041 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.468264 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f601aa7e-7179-4ce4-a83f-aa0c324970e0" containerName="console" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.469714 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.508978 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.551893 4879 generic.go:334] "Generic (PLEG): container finished" podID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerID="1528d107384a896201d869992d3c869e3c5d52be0599bf796decf9739886c608" exitCode=0 Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.551969 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerDied","Data":"1528d107384a896201d869992d3c869e3c5d52be0599bf796decf9739886c608"} Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.557836 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerStarted","Data":"c53955aa59b6b37c148e1624560c12d5f6add3facb5e931bea559c52031ff5bc"} Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.665490 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvdng\" (UniqueName: \"kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.665583 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.665669 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.767389 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.767523 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvdng\" (UniqueName: \"kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.767570 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities\") pod \"certified-operators-rnllb\" (UID: 
\"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.768004 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.768166 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.791774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvdng\" (UniqueName: \"kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng\") pod \"certified-operators-rnllb\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:56 crc kubenswrapper[4879]: I1125 14:38:56.808050 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:38:57 crc kubenswrapper[4879]: I1125 14:38:57.172861 4879 scope.go:117] "RemoveContainer" containerID="f3d352a9412850e5e9af693db40c56ed6a8d9bd30d0f2983ad3410383f6c13ad" Nov 25 14:38:57 crc kubenswrapper[4879]: I1125 14:38:57.246711 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:38:57 crc kubenswrapper[4879]: W1125 14:38:57.252145 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod045dfc75_5e4c_4264_87e9_34962949a257.slice/crio-181a9367187c90740decd2095642ba17126396047f2e3edfc70379b6a5eb737b WatchSource:0}: Error finding container 181a9367187c90740decd2095642ba17126396047f2e3edfc70379b6a5eb737b: Status 404 returned error can't find the container with id 181a9367187c90740decd2095642ba17126396047f2e3edfc70379b6a5eb737b Nov 25 14:38:57 crc kubenswrapper[4879]: I1125 14:38:57.569545 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerStarted","Data":"181a9367187c90740decd2095642ba17126396047f2e3edfc70379b6a5eb737b"} Nov 25 14:38:58 crc kubenswrapper[4879]: I1125 14:38:58.580537 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerStarted","Data":"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745"} Nov 25 14:38:58 crc kubenswrapper[4879]: I1125 14:38:58.582358 4879 generic.go:334] "Generic (PLEG): container finished" podID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerID="c53955aa59b6b37c148e1624560c12d5f6add3facb5e931bea559c52031ff5bc" exitCode=0 Nov 25 14:38:58 crc kubenswrapper[4879]: I1125 14:38:58.582477 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" 
event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerDied","Data":"c53955aa59b6b37c148e1624560c12d5f6add3facb5e931bea559c52031ff5bc"} Nov 25 14:38:58 crc kubenswrapper[4879]: I1125 14:38:58.587194 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef"} Nov 25 14:38:59 crc kubenswrapper[4879]: I1125 14:38:59.600033 4879 generic.go:334] "Generic (PLEG): container finished" podID="045dfc75-5e4c-4264-87e9-34962949a257" containerID="cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745" exitCode=0 Nov 25 14:38:59 crc kubenswrapper[4879]: I1125 14:38:59.600213 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerDied","Data":"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745"} Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.682090 4879 generic.go:334] "Generic (PLEG): container finished" podID="045dfc75-5e4c-4264-87e9-34962949a257" containerID="8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd" exitCode=0 Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.682329 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerDied","Data":"8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd"} Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.689253 4879 generic.go:334] "Generic (PLEG): container finished" podID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerID="190e16c1466a1f2320007c66ef66d4fd80cecedb73289042cc0b3dabd8e7bbd5" exitCode=0 Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.689362 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerDied","Data":"190e16c1466a1f2320007c66ef66d4fd80cecedb73289042cc0b3dabd8e7bbd5"} Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.693067 4879 generic.go:334] "Generic (PLEG): container finished" podID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerID="1d6b1cf94f82ecf9891886af1fc4668048be37984abf181999c0f124e2827375" exitCode=0 Nov 25 14:39:06 crc kubenswrapper[4879]: I1125 14:39:06.693292 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerDied","Data":"1d6b1cf94f82ecf9891886af1fc4668048be37984abf181999c0f124e2827375"} Nov 25 14:39:07 crc kubenswrapper[4879]: I1125 14:39:07.700718 4879 generic.go:334] "Generic (PLEG): container finished" podID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerID="8b3d6879ff4232aeeb760ae1a01486122659ec02b82e6368b733dcd332bcc394" exitCode=0 Nov 25 14:39:07 crc kubenswrapper[4879]: I1125 14:39:07.700798 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerDied","Data":"8b3d6879ff4232aeeb760ae1a01486122659ec02b82e6368b733dcd332bcc394"} Nov 25 14:39:08 crc kubenswrapper[4879]: I1125 14:39:08.708958 4879 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerStarted","Data":"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463"} Nov 25 14:39:08 crc kubenswrapper[4879]: I1125 14:39:08.982780 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:39:08 crc kubenswrapper[4879]: I1125 14:39:08.999860 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rnllb" podStartSLOduration=4.450956648 podStartE2EDuration="12.999825534s" podCreationTimestamp="2025-11-25 14:38:56 +0000 UTC" firstStartedPulling="2025-11-25 14:38:59.602915462 +0000 UTC m=+831.206328533" lastFinishedPulling="2025-11-25 14:39:08.151784348 +0000 UTC m=+839.755197419" observedRunningTime="2025-11-25 14:39:08.726889542 +0000 UTC m=+840.330302623" watchObservedRunningTime="2025-11-25 14:39:08.999825534 +0000 UTC m=+840.603238605" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.134516 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle\") pod \"adde5dde-ce0c-484c-b4e4-7326b167e712\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.134637 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdh7h\" (UniqueName: \"kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h\") pod \"adde5dde-ce0c-484c-b4e4-7326b167e712\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.134781 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util\") pod \"adde5dde-ce0c-484c-b4e4-7326b167e712\" (UID: \"adde5dde-ce0c-484c-b4e4-7326b167e712\") " Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.135752 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle" (OuterVolumeSpecName: "bundle") pod "adde5dde-ce0c-484c-b4e4-7326b167e712" (UID: "adde5dde-ce0c-484c-b4e4-7326b167e712"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.145392 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h" (OuterVolumeSpecName: "kube-api-access-xdh7h") pod "adde5dde-ce0c-484c-b4e4-7326b167e712" (UID: "adde5dde-ce0c-484c-b4e4-7326b167e712"). InnerVolumeSpecName "kube-api-access-xdh7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.147032 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util" (OuterVolumeSpecName: "util") pod "adde5dde-ce0c-484c-b4e4-7326b167e712" (UID: "adde5dde-ce0c-484c-b4e4-7326b167e712"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.235926 4879 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-util\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.235963 4879 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/adde5dde-ce0c-484c-b4e4-7326b167e712-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.235977 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdh7h\" (UniqueName: \"kubernetes.io/projected/adde5dde-ce0c-484c-b4e4-7326b167e712-kube-api-access-xdh7h\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.717029 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.717019 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr" event={"ID":"adde5dde-ce0c-484c-b4e4-7326b167e712","Type":"ContainerDied","Data":"4b662e7c85f31583e41de40b7b5dbbeed93a930af2d4ac50b7df8a38cf71b4a1"} Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.717463 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b662e7c85f31583e41de40b7b5dbbeed93a930af2d4ac50b7df8a38cf71b4a1" Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.719274 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerStarted","Data":"f849b27c9db49de8f29f345408ad93006e584a7f694d8717c3f8cd1b9fcf090d"} Nov 25 14:39:09 crc kubenswrapper[4879]: I1125 14:39:09.737717 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vc6vx" podStartSLOduration=6.182263171 podStartE2EDuration="16.73770218s" podCreationTimestamp="2025-11-25 14:38:53 +0000 UTC" firstStartedPulling="2025-11-25 14:38:58.587179963 +0000 UTC m=+830.190593034" lastFinishedPulling="2025-11-25 14:39:09.142618972 +0000 UTC m=+840.746032043" observedRunningTime="2025-11-25 14:39:09.735030694 +0000 UTC m=+841.338443765" watchObservedRunningTime="2025-11-25 14:39:09.73770218 +0000 UTC m=+841.341115251" Nov 25 14:39:13 crc kubenswrapper[4879]: I1125 14:39:13.619032 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:13 crc kubenswrapper[4879]: I1125 14:39:13.619559 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:13 crc kubenswrapper[4879]: I1125 14:39:13.654904 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:16 crc kubenswrapper[4879]: I1125 14:39:16.809612 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:39:16 crc kubenswrapper[4879]: I1125 14:39:16.809918 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rnllb" Nov 
25 14:39:16 crc kubenswrapper[4879]: I1125 14:39:16.850613 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:39:17 crc kubenswrapper[4879]: I1125 14:39:17.795248 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:39:19 crc kubenswrapper[4879]: I1125 14:39:19.091469 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:39:19 crc kubenswrapper[4879]: I1125 14:39:19.769864 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rnllb" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="registry-server" containerID="cri-o://2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463" gracePeriod=2 Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.336176 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.413723 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content\") pod \"045dfc75-5e4c-4264-87e9-34962949a257\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.413843 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities\") pod \"045dfc75-5e4c-4264-87e9-34962949a257\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.413868 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvdng\" (UniqueName: \"kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng\") pod \"045dfc75-5e4c-4264-87e9-34962949a257\" (UID: \"045dfc75-5e4c-4264-87e9-34962949a257\") " Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.414752 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities" (OuterVolumeSpecName: "utilities") pod "045dfc75-5e4c-4264-87e9-34962949a257" (UID: "045dfc75-5e4c-4264-87e9-34962949a257"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.427506 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng" (OuterVolumeSpecName: "kube-api-access-bvdng") pod "045dfc75-5e4c-4264-87e9-34962949a257" (UID: "045dfc75-5e4c-4264-87e9-34962949a257"). InnerVolumeSpecName "kube-api-access-bvdng". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.470861 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "045dfc75-5e4c-4264-87e9-34962949a257" (UID: "045dfc75-5e4c-4264-87e9-34962949a257"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.514843 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.514875 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvdng\" (UniqueName: \"kubernetes.io/projected/045dfc75-5e4c-4264-87e9-34962949a257-kube-api-access-bvdng\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.514885 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/045dfc75-5e4c-4264-87e9-34962949a257-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.783005 4879 generic.go:334] "Generic (PLEG): container finished" podID="045dfc75-5e4c-4264-87e9-34962949a257" containerID="2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463" exitCode=0 Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.783096 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerDied","Data":"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463"} Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.783280 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rnllb" event={"ID":"045dfc75-5e4c-4264-87e9-34962949a257","Type":"ContainerDied","Data":"181a9367187c90740decd2095642ba17126396047f2e3edfc70379b6a5eb737b"} Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.783310 4879 scope.go:117] "RemoveContainer" containerID="2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.783548 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rnllb" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.804576 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.805395 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rnllb"] Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.813644 4879 scope.go:117] "RemoveContainer" containerID="8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.833922 4879 scope.go:117] "RemoveContainer" containerID="cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.870801 4879 scope.go:117] "RemoveContainer" containerID="2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463" Nov 25 14:39:21 crc kubenswrapper[4879]: E1125 14:39:21.871215 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463\": container with ID starting with 2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463 not found: ID does not exist" containerID="2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.871251 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463"} err="failed to get container status \"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463\": rpc error: code = NotFound desc = could not find container \"2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463\": container with ID starting with 2f84d85361de881aa09403cf71487945a3cd72350c1cd5746c205007a5adf463 not found: ID does not exist" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.871272 4879 scope.go:117] "RemoveContainer" containerID="8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd" Nov 25 14:39:21 crc kubenswrapper[4879]: E1125 14:39:21.871478 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd\": container with ID starting with 8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd not found: ID does not exist" containerID="8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.871495 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd"} err="failed to get container status \"8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd\": rpc error: code = NotFound desc = could not find container \"8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd\": container with ID starting with 8869767d035b098580174316f12314d12291ab98e1a06109c7e69eaa60199efd not found: ID does not exist" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.871513 4879 scope.go:117] "RemoveContainer" containerID="cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745" Nov 25 14:39:21 crc kubenswrapper[4879]: E1125 14:39:21.871715 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745\": container with ID starting with cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745 not found: ID does not exist" containerID="cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745" Nov 25 14:39:21 crc kubenswrapper[4879]: I1125 14:39:21.871734 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745"} err="failed to get container status \"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745\": rpc error: code = NotFound desc = could not find container \"cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745\": container with ID starting with cfa73768032d33edb698dea4c8b566bb4c3c85648ae5af87ff1f246387d01745 not found: ID does not exist" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.263436 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p"] Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264230 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="registry-server" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264247 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="registry-server" Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264269 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="extract-content" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264275 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="extract-content" Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264289 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="extract-utilities" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264297 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="extract-utilities" Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264305 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="pull" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264311 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="pull" Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264329 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="util" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264335 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="util" Nov 25 14:39:22 crc kubenswrapper[4879]: E1125 14:39:22.264354 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="extract" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264360 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="extract" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264548 4879 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="adde5dde-ce0c-484c-b4e4-7326b167e712" containerName="extract" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.264584 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="045dfc75-5e4c-4264-87e9-34962949a257" containerName="registry-server" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.273624 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.278032 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-gmkt6" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.278286 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.278399 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.278505 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.282682 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.325945 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-webhook-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.326037 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw2sd\" (UniqueName: \"kubernetes.io/projected/1bf9f1b8-1476-4f3a-963b-986a0ae66426-kube-api-access-jw2sd\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.326156 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-apiservice-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.326743 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p"] Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.427509 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-apiservice-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.427590 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-webhook-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.427619 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw2sd\" (UniqueName: \"kubernetes.io/projected/1bf9f1b8-1476-4f3a-963b-986a0ae66426-kube-api-access-jw2sd\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.433876 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-apiservice-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.436555 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1bf9f1b8-1476-4f3a-963b-986a0ae66426-webhook-cert\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.452719 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw2sd\" (UniqueName: \"kubernetes.io/projected/1bf9f1b8-1476-4f3a-963b-986a0ae66426-kube-api-access-jw2sd\") pod \"metallb-operator-controller-manager-7965d46465-b9w8p\" (UID: \"1bf9f1b8-1476-4f3a-963b-986a0ae66426\") " pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.600337 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.713629 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps"] Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.714484 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.721305 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.721341 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.721974 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5ks9l" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.740766 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps"] Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.832796 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-webhook-cert\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.833149 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-apiservice-cert\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.833208 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vz24\" (UniqueName: \"kubernetes.io/projected/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-kube-api-access-4vz24\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.929458 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p"] Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.935099 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vz24\" (UniqueName: \"kubernetes.io/projected/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-kube-api-access-4vz24\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.935187 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-webhook-cert\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.935222 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-apiservice-cert\") pod 
\"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.952408 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-webhook-cert\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.953427 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-apiservice-cert\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:22 crc kubenswrapper[4879]: I1125 14:39:22.959247 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vz24\" (UniqueName: \"kubernetes.io/projected/d5400cd5-7909-4b72-92d6-f70a5f6cab2f-kube-api-access-4vz24\") pod \"metallb-operator-webhook-server-b98dc7cc7-s6dps\" (UID: \"d5400cd5-7909-4b72-92d6-f70a5f6cab2f\") " pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.073271 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.520806 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps"] Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.655203 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="045dfc75-5e4c-4264-87e9-34962949a257" path="/var/lib/kubelet/pods/045dfc75-5e4c-4264-87e9-34962949a257/volumes" Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.669529 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.805669 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerStarted","Data":"91235c5f2a44559d4528d726dd1aa0cca7ff3c84e880fa5c78fba7aa688f455b"} Nov 25 14:39:23 crc kubenswrapper[4879]: I1125 14:39:23.806620 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" event={"ID":"d5400cd5-7909-4b72-92d6-f70a5f6cab2f","Type":"ContainerStarted","Data":"97ac0ab6ed687c174173f93aa4f4f9e567701e983aee7c5da4cba3cfc91da557"} Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.481529 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.482168 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vc6vx" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="registry-server" containerID="cri-o://f849b27c9db49de8f29f345408ad93006e584a7f694d8717c3f8cd1b9fcf090d" gracePeriod=2 Nov 25 14:39:26 crc kubenswrapper[4879]: 
I1125 14:39:26.833738 4879 generic.go:334] "Generic (PLEG): container finished" podID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerID="f849b27c9db49de8f29f345408ad93006e584a7f694d8717c3f8cd1b9fcf090d" exitCode=0 Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.833810 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerDied","Data":"f849b27c9db49de8f29f345408ad93006e584a7f694d8717c3f8cd1b9fcf090d"} Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.956316 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.988937 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content\") pod \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.989000 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c2bv\" (UniqueName: \"kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv\") pod \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.989037 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities\") pod \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\" (UID: \"5ad004f5-1b8e-4d5c-834f-37ae507aacac\") " Nov 25 14:39:26 crc kubenswrapper[4879]: I1125 14:39:26.991069 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities" (OuterVolumeSpecName: "utilities") pod "5ad004f5-1b8e-4d5c-834f-37ae507aacac" (UID: "5ad004f5-1b8e-4d5c-834f-37ae507aacac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.002316 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv" (OuterVolumeSpecName: "kube-api-access-8c2bv") pod "5ad004f5-1b8e-4d5c-834f-37ae507aacac" (UID: "5ad004f5-1b8e-4d5c-834f-37ae507aacac"). InnerVolumeSpecName "kube-api-access-8c2bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.015531 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ad004f5-1b8e-4d5c-834f-37ae507aacac" (UID: "5ad004f5-1b8e-4d5c-834f-37ae507aacac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.090044 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.090082 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c2bv\" (UniqueName: \"kubernetes.io/projected/5ad004f5-1b8e-4d5c-834f-37ae507aacac-kube-api-access-8c2bv\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.090094 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ad004f5-1b8e-4d5c-834f-37ae507aacac-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.842302 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc6vx" event={"ID":"5ad004f5-1b8e-4d5c-834f-37ae507aacac","Type":"ContainerDied","Data":"aa58d74955be64509f0f53039bdc850e4df9af8c6decb578542e2a40e5144954"} Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.842622 4879 scope.go:117] "RemoveContainer" containerID="f849b27c9db49de8f29f345408ad93006e584a7f694d8717c3f8cd1b9fcf090d" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.842550 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc6vx" Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.870926 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:39:27 crc kubenswrapper[4879]: I1125 14:39:27.874567 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc6vx"] Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.495159 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 14:39:28 crc kubenswrapper[4879]: E1125 14:39:28.495439 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="extract-utilities" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.495455 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="extract-utilities" Nov 25 14:39:28 crc kubenswrapper[4879]: E1125 14:39:28.495487 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="registry-server" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.495495 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="registry-server" Nov 25 14:39:28 crc kubenswrapper[4879]: E1125 14:39:28.495508 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="extract-content" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.495516 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="extract-content" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.495649 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" containerName="registry-server" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.496502 4879 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.505606 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.611357 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb446\" (UniqueName: \"kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.611414 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.611454 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.713068 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb446\" (UniqueName: \"kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.713138 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.713265 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.713816 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.713897 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.731998 
4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb446\" (UniqueName: \"kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446\") pod \"community-operators-dgc57\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:28 crc kubenswrapper[4879]: I1125 14:39:28.812467 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:29 crc kubenswrapper[4879]: I1125 14:39:29.668550 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ad004f5-1b8e-4d5c-834f-37ae507aacac" path="/var/lib/kubelet/pods/5ad004f5-1b8e-4d5c-834f-37ae507aacac/volumes" Nov 25 14:39:31 crc kubenswrapper[4879]: I1125 14:39:31.613783 4879 scope.go:117] "RemoveContainer" containerID="190e16c1466a1f2320007c66ef66d4fd80cecedb73289042cc0b3dabd8e7bbd5" Nov 25 14:39:32 crc kubenswrapper[4879]: I1125 14:39:32.352082 4879 scope.go:117] "RemoveContainer" containerID="c53955aa59b6b37c148e1624560c12d5f6add3facb5e931bea559c52031ff5bc" Nov 25 14:39:32 crc kubenswrapper[4879]: I1125 14:39:32.814885 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 14:39:32 crc kubenswrapper[4879]: W1125 14:39:32.942283 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde9b900d_139d_4e55_b2e9_48ca61ee770c.slice/crio-a32f6e41680b0026d5261ef5d91016fb700b0133b3bbbbdc034d432c4f2c758a WatchSource:0}: Error finding container a32f6e41680b0026d5261ef5d91016fb700b0133b3bbbbdc034d432c4f2c758a: Status 404 returned error can't find the container with id a32f6e41680b0026d5261ef5d91016fb700b0133b3bbbbdc034d432c4f2c758a Nov 25 14:39:33 crc kubenswrapper[4879]: I1125 14:39:33.889314 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerStarted","Data":"9bbe5f244d168f99dcc124a1fc0b095a0b5d75bd93006ace9ce81698b753e2f3"} Nov 25 14:39:33 crc kubenswrapper[4879]: I1125 14:39:33.891266 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerStarted","Data":"a32f6e41680b0026d5261ef5d91016fb700b0133b3bbbbdc034d432c4f2c758a"} Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.896560 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" event={"ID":"d5400cd5-7909-4b72-92d6-f70a5f6cab2f","Type":"ContainerStarted","Data":"56f5b17dfef99b3273e1f0d83a7a392b843a19975476b0304415b10e37977350"} Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.897507 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.900029 4879 generic.go:334] "Generic (PLEG): container finished" podID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerID="64d19692f86123741dd1409a6710518668aa34340db16b79795dc427980caafd" exitCode=0 Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.902657 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" 
event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerDied","Data":"64d19692f86123741dd1409a6710518668aa34340db16b79795dc427980caafd"} Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.902717 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.936524 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" podStartSLOduration=3.909125815 podStartE2EDuration="12.936501585s" podCreationTimestamp="2025-11-25 14:39:22 +0000 UTC" firstStartedPulling="2025-11-25 14:39:23.529414978 +0000 UTC m=+855.132828049" lastFinishedPulling="2025-11-25 14:39:32.556790748 +0000 UTC m=+864.160203819" observedRunningTime="2025-11-25 14:39:34.92910854 +0000 UTC m=+866.532521621" watchObservedRunningTime="2025-11-25 14:39:34.936501585 +0000 UTC m=+866.539914656" Nov 25 14:39:34 crc kubenswrapper[4879]: I1125 14:39:34.952181 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podStartSLOduration=3.35812139 podStartE2EDuration="12.952161588s" podCreationTimestamp="2025-11-25 14:39:22 +0000 UTC" firstStartedPulling="2025-11-25 14:39:22.961550307 +0000 UTC m=+854.564963378" lastFinishedPulling="2025-11-25 14:39:32.555590505 +0000 UTC m=+864.159003576" observedRunningTime="2025-11-25 14:39:34.947603972 +0000 UTC m=+866.551017043" watchObservedRunningTime="2025-11-25 14:39:34.952161588 +0000 UTC m=+866.555574659" Nov 25 14:39:40 crc kubenswrapper[4879]: I1125 14:39:40.958097 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerStarted","Data":"095d5d322f205bfe1ff4631ce6a377f7f0e2457fe20c08611104880d04141e31"} Nov 25 14:39:41 crc kubenswrapper[4879]: I1125 14:39:41.965177 4879 generic.go:334] "Generic (PLEG): container finished" podID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerID="095d5d322f205bfe1ff4631ce6a377f7f0e2457fe20c08611104880d04141e31" exitCode=0 Nov 25 14:39:41 crc kubenswrapper[4879]: I1125 14:39:41.965237 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerDied","Data":"095d5d322f205bfe1ff4631ce6a377f7f0e2457fe20c08611104880d04141e31"} Nov 25 14:39:43 crc kubenswrapper[4879]: I1125 14:39:43.082845 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-b98dc7cc7-s6dps" Nov 25 14:39:43 crc kubenswrapper[4879]: I1125 14:39:43.983641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerStarted","Data":"23a4ee28429b9389e47a7ad3e90d47efd3255ea4a9ab88962b18d9c588c9f31a"} Nov 25 14:39:45 crc kubenswrapper[4879]: I1125 14:39:45.008558 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dgc57" podStartSLOduration=9.130757817 podStartE2EDuration="17.008533324s" podCreationTimestamp="2025-11-25 14:39:28 +0000 UTC" firstStartedPulling="2025-11-25 14:39:34.903923243 +0000 UTC m=+866.507336314" lastFinishedPulling="2025-11-25 14:39:42.78169874 +0000 UTC m=+874.385111821" 
observedRunningTime="2025-11-25 14:39:45.0044007 +0000 UTC m=+876.607813771" watchObservedRunningTime="2025-11-25 14:39:45.008533324 +0000 UTC m=+876.611946395" Nov 25 14:39:50 crc kubenswrapper[4879]: I1125 14:39:48.812642 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:50 crc kubenswrapper[4879]: I1125 14:39:48.812686 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:50 crc kubenswrapper[4879]: I1125 14:39:48.865717 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:50 crc kubenswrapper[4879]: I1125 14:39:49.090450 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dgc57" Nov 25 14:39:50 crc kubenswrapper[4879]: I1125 14:39:50.931834 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 14:39:51 crc kubenswrapper[4879]: I1125 14:39:51.281766 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:39:51 crc kubenswrapper[4879]: I1125 14:39:51.282048 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d6t8h" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" containerID="cri-o://701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" gracePeriod=2 Nov 25 14:39:51 crc kubenswrapper[4879]: E1125 14:39:51.692922 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb is running failed: container process not found" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:39:51 crc kubenswrapper[4879]: E1125 14:39:51.693980 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb is running failed: container process not found" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:39:51 crc kubenswrapper[4879]: E1125 14:39:51.694438 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb is running failed: container process not found" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 14:39:51 crc kubenswrapper[4879]: E1125 14:39:51.694557 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-d6t8h" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" Nov 25 14:39:51 crc kubenswrapper[4879]: I1125 14:39:51.871613 4879 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.024362 4879 generic.go:334] "Generic (PLEG): container finished" podID="89fc34a9-051c-46cb-b745-879d6c018542" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" exitCode=0 Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.024415 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerDied","Data":"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb"} Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.024427 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d6t8h" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.024458 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d6t8h" event={"ID":"89fc34a9-051c-46cb-b745-879d6c018542","Type":"ContainerDied","Data":"3a1f5ab4f07534f0c731d012b8852aa0a1a2ad5eb589121b9ded4e5635d2e8d4"} Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.024480 4879 scope.go:117] "RemoveContainer" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.037768 4879 scope.go:117] "RemoveContainer" containerID="e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.037879 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content\") pod \"89fc34a9-051c-46cb-b745-879d6c018542\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.037974 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stdv8\" (UniqueName: \"kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8\") pod \"89fc34a9-051c-46cb-b745-879d6c018542\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.038020 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities\") pod \"89fc34a9-051c-46cb-b745-879d6c018542\" (UID: \"89fc34a9-051c-46cb-b745-879d6c018542\") " Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.038916 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities" (OuterVolumeSpecName: "utilities") pod "89fc34a9-051c-46cb-b745-879d6c018542" (UID: "89fc34a9-051c-46cb-b745-879d6c018542"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.051320 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8" (OuterVolumeSpecName: "kube-api-access-stdv8") pod "89fc34a9-051c-46cb-b745-879d6c018542" (UID: "89fc34a9-051c-46cb-b745-879d6c018542"). InnerVolumeSpecName "kube-api-access-stdv8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.084509 4879 scope.go:117] "RemoveContainer" containerID="47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.091334 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89fc34a9-051c-46cb-b745-879d6c018542" (UID: "89fc34a9-051c-46cb-b745-879d6c018542"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.100059 4879 scope.go:117] "RemoveContainer" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" Nov 25 14:39:52 crc kubenswrapper[4879]: E1125 14:39:52.100627 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb\": container with ID starting with 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb not found: ID does not exist" containerID="701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.100661 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb"} err="failed to get container status \"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb\": rpc error: code = NotFound desc = could not find container \"701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb\": container with ID starting with 701e42a9249b7eeda950bd554483c66deefbc9a948dd2fac37e862112207b3cb not found: ID does not exist" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.100684 4879 scope.go:117] "RemoveContainer" containerID="e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507" Nov 25 14:39:52 crc kubenswrapper[4879]: E1125 14:39:52.101077 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507\": container with ID starting with e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507 not found: ID does not exist" containerID="e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.101141 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507"} err="failed to get container status \"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507\": rpc error: code = NotFound desc = could not find container \"e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507\": container with ID starting with e84d1c54f58d85533ca6ac0be86d9e0e0901276cbc33a42d3d521a902a626507 not found: ID does not exist" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.101166 4879 scope.go:117] "RemoveContainer" containerID="47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2" Nov 25 14:39:52 crc kubenswrapper[4879]: E1125 14:39:52.101669 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2\": container with ID starting with 47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2 not found: ID does not exist" containerID="47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.101697 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2"} err="failed to get container status \"47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2\": rpc error: code = NotFound desc = could not find container \"47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2\": container with ID starting with 47ec6d53d878a413b3faa89461110d3d51941e4ea0a42f0410988115e91ef7f2 not found: ID does not exist" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.139427 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stdv8\" (UniqueName: \"kubernetes.io/projected/89fc34a9-051c-46cb-b745-879d6c018542-kube-api-access-stdv8\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.139474 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.139484 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89fc34a9-051c-46cb-b745-879d6c018542-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.352381 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:39:52 crc kubenswrapper[4879]: I1125 14:39:52.357017 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d6t8h"] Nov 25 14:39:53 crc kubenswrapper[4879]: I1125 14:39:53.651608 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89fc34a9-051c-46cb-b745-879d6c018542" path="/var/lib/kubelet/pods/89fc34a9-051c-46cb-b745-879d6c018542/volumes" Nov 25 14:40:12 crc kubenswrapper[4879]: I1125 14:40:12.603534 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.332612 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-vx7w9"] Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.332900 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.332923 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.332951 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="extract-utilities" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.332962 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="extract-utilities" Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.332973 4879 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="extract-content" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.332980 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="extract-content" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.333105 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="89fc34a9-051c-46cb-b745-879d6c018542" containerName="registry-server" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.335607 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.338884 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-n96r6" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.338939 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.339061 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.341845 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-29xht"] Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.342955 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.345865 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.365809 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-29xht"] Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.455315 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-45g57"] Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.456472 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.458821 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.459422 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.459673 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.459942 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-926tr" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.509741 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-5rg2m"] Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.509895 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-conf\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.509973 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwtph\" (UniqueName: \"kubernetes.io/projected/043cbac7-0f97-43ed-a287-4af89ceaf905-kube-api-access-nwtph\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510002 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-reloader\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510062 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-metrics-certs\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510174 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96k4p\" (UniqueName: \"kubernetes.io/projected/b11c1717-f8f4-4838-acc9-7dc492d69268-kube-api-access-96k4p\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510199 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510234 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " 
pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510267 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/043cbac7-0f97-43ed-a287-4af89ceaf905-metallb-excludel2\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510293 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics-certs\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510313 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b2f76a4-9bca-41a3-9be6-48dd06986803-cert\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510331 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w5gm\" (UniqueName: \"kubernetes.io/projected/6b2f76a4-9bca-41a3-9be6-48dd06986803-kube-api-access-5w5gm\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-sockets\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-startup\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.510697 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.530972 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.533861 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-5rg2m"] Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.611965 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96k4p\" (UniqueName: \"kubernetes.io/projected/b11c1717-f8f4-4838-acc9-7dc492d69268-kube-api-access-96k4p\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612007 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612035 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612058 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/043cbac7-0f97-43ed-a287-4af89ceaf905-metallb-excludel2\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612077 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics-certs\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612094 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b2f76a4-9bca-41a3-9be6-48dd06986803-cert\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612114 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w5gm\" (UniqueName: \"kubernetes.io/projected/6b2f76a4-9bca-41a3-9be6-48dd06986803-kube-api-access-5w5gm\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612152 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-sockets\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612175 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-startup\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612191 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-conf\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612207 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwtph\" (UniqueName: \"kubernetes.io/projected/043cbac7-0f97-43ed-a287-4af89ceaf905-kube-api-access-nwtph\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612225 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-reloader\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.612245 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-metrics-certs\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.613062 4879 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.613113 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist podName:043cbac7-0f97-43ed-a287-4af89ceaf905 nodeName:}" failed. No retries permitted until 2025-11-25 14:40:14.113095512 +0000 UTC m=+905.716508583 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist") pod "speaker-45g57" (UID: "043cbac7-0f97-43ed-a287-4af89ceaf905") : secret "metallb-memberlist" not found Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.613427 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-sockets\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.613902 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-reloader\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.613974 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.614066 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-conf\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.614204 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b11c1717-f8f4-4838-acc9-7dc492d69268-frr-startup\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.614383 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/043cbac7-0f97-43ed-a287-4af89ceaf905-metallb-excludel2\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.620760 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b11c1717-f8f4-4838-acc9-7dc492d69268-metrics-certs\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.620776 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b2f76a4-9bca-41a3-9be6-48dd06986803-cert\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.621081 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-metrics-certs\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.658081 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-nwtph\" (UniqueName: \"kubernetes.io/projected/043cbac7-0f97-43ed-a287-4af89ceaf905-kube-api-access-nwtph\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.659875 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w5gm\" (UniqueName: \"kubernetes.io/projected/6b2f76a4-9bca-41a3-9be6-48dd06986803-kube-api-access-5w5gm\") pod \"frr-k8s-webhook-server-6998585d5-29xht\" (UID: \"6b2f76a4-9bca-41a3-9be6-48dd06986803\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.660813 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96k4p\" (UniqueName: \"kubernetes.io/projected/b11c1717-f8f4-4838-acc9-7dc492d69268-kube-api-access-96k4p\") pod \"frr-k8s-vx7w9\" (UID: \"b11c1717-f8f4-4838-acc9-7dc492d69268\") " pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.661206 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.714969 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk46h\" (UniqueName: \"kubernetes.io/projected/4531c2f9-6d64-4cdd-8546-a4494fcdf027-kube-api-access-kk46h\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.715182 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-cert\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.715239 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.816065 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.816506 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk46h\" (UniqueName: \"kubernetes.io/projected/4531c2f9-6d64-4cdd-8546-a4494fcdf027-kube-api-access-kk46h\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.816286 4879 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Nov 25 14:40:13 crc kubenswrapper[4879]: E1125 14:40:13.816603 4879 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs podName:4531c2f9-6d64-4cdd-8546-a4494fcdf027 nodeName:}" failed. No retries permitted until 2025-11-25 14:40:14.316582195 +0000 UTC m=+905.919995266 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs") pod "controller-6c7b4b5f48-5rg2m" (UID: "4531c2f9-6d64-4cdd-8546-a4494fcdf027") : secret "controller-certs-secret" not found Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.816530 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-cert\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.821915 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.835660 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-cert\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.844324 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk46h\" (UniqueName: \"kubernetes.io/projected/4531c2f9-6d64-4cdd-8546-a4494fcdf027-kube-api-access-kk46h\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.908854 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-29xht"] Nov 25 14:40:13 crc kubenswrapper[4879]: W1125 14:40:13.926608 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b2f76a4_9bca_41a3_9be6_48dd06986803.slice/crio-c0ac70264cabdddd70c3d29dcf8e08d2912497a71daececa4160d633646cc510 WatchSource:0}: Error finding container c0ac70264cabdddd70c3d29dcf8e08d2912497a71daececa4160d633646cc510: Status 404 returned error can't find the container with id c0ac70264cabdddd70c3d29dcf8e08d2912497a71daececa4160d633646cc510 Nov 25 14:40:13 crc kubenswrapper[4879]: I1125 14:40:13.953908 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.121442 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:14 crc kubenswrapper[4879]: E1125 14:40:14.121648 4879 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 25 14:40:14 crc kubenswrapper[4879]: E1125 14:40:14.121693 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist podName:043cbac7-0f97-43ed-a287-4af89ceaf905 nodeName:}" failed. 
No retries permitted until 2025-11-25 14:40:15.12167892 +0000 UTC m=+906.725091991 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist") pod "speaker-45g57" (UID: "043cbac7-0f97-43ed-a287-4af89ceaf905") : secret "metallb-memberlist" not found Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.171373 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" event={"ID":"6b2f76a4-9bca-41a3-9be6-48dd06986803","Type":"ContainerStarted","Data":"c0ac70264cabdddd70c3d29dcf8e08d2912497a71daececa4160d633646cc510"} Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.324137 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.330400 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4531c2f9-6d64-4cdd-8546-a4494fcdf027-metrics-certs\") pod \"controller-6c7b4b5f48-5rg2m\" (UID: \"4531c2f9-6d64-4cdd-8546-a4494fcdf027\") " pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.424665 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:14 crc kubenswrapper[4879]: I1125 14:40:14.829022 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-5rg2m"] Nov 25 14:40:15 crc kubenswrapper[4879]: I1125 14:40:15.134074 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:15 crc kubenswrapper[4879]: E1125 14:40:15.134314 4879 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 25 14:40:15 crc kubenswrapper[4879]: E1125 14:40:15.134560 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist podName:043cbac7-0f97-43ed-a287-4af89ceaf905 nodeName:}" failed. No retries permitted until 2025-11-25 14:40:17.134543084 +0000 UTC m=+908.737956155 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist") pod "speaker-45g57" (UID: "043cbac7-0f97-43ed-a287-4af89ceaf905") : secret "metallb-memberlist" not found Nov 25 14:40:15 crc kubenswrapper[4879]: I1125 14:40:15.186792 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-5rg2m" event={"ID":"4531c2f9-6d64-4cdd-8546-a4494fcdf027","Type":"ContainerStarted","Data":"d04134bb0b0f40318488cb277edcf5ae583edf3f9316743a971bfb408ccf8206"} Nov 25 14:40:15 crc kubenswrapper[4879]: I1125 14:40:15.186847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-5rg2m" event={"ID":"4531c2f9-6d64-4cdd-8546-a4494fcdf027","Type":"ContainerStarted","Data":"c8b4240a7bbd91bbc0ab2c99612ffda77c72c685c884f0aab41175473375ee2b"} Nov 25 14:40:15 crc kubenswrapper[4879]: I1125 14:40:15.187900 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"d0879d346f9f3ad59504611d0837705651dd20b7e2db3ddba064d9b227efbfa9"} Nov 25 14:40:16 crc kubenswrapper[4879]: I1125 14:40:16.194925 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-5rg2m" event={"ID":"4531c2f9-6d64-4cdd-8546-a4494fcdf027","Type":"ContainerStarted","Data":"93c1985ae15e3cb0853d823ffc8898d5ed9fa1fa9a1552b28ac7d9615dc1ffdc"} Nov 25 14:40:16 crc kubenswrapper[4879]: I1125 14:40:16.195056 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:16 crc kubenswrapper[4879]: I1125 14:40:16.222319 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-5rg2m" podStartSLOduration=3.222304133 podStartE2EDuration="3.222304133s" podCreationTimestamp="2025-11-25 14:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:40:16.218813642 +0000 UTC m=+907.822226713" watchObservedRunningTime="2025-11-25 14:40:16.222304133 +0000 UTC m=+907.825717204" Nov 25 14:40:17 crc kubenswrapper[4879]: I1125 14:40:17.161314 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:17 crc kubenswrapper[4879]: I1125 14:40:17.171696 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/043cbac7-0f97-43ed-a287-4af89ceaf905-memberlist\") pod \"speaker-45g57\" (UID: \"043cbac7-0f97-43ed-a287-4af89ceaf905\") " pod="metallb-system/speaker-45g57" Nov 25 14:40:17 crc kubenswrapper[4879]: I1125 14:40:17.371769 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-45g57" Nov 25 14:40:17 crc kubenswrapper[4879]: W1125 14:40:17.403884 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod043cbac7_0f97_43ed_a287_4af89ceaf905.slice/crio-203282d246bbde9d2eb68ff5ba6a2286034fc586c9e1fb52caa146225a76726b WatchSource:0}: Error finding container 203282d246bbde9d2eb68ff5ba6a2286034fc586c9e1fb52caa146225a76726b: Status 404 returned error can't find the container with id 203282d246bbde9d2eb68ff5ba6a2286034fc586c9e1fb52caa146225a76726b Nov 25 14:40:18 crc kubenswrapper[4879]: I1125 14:40:18.219555 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45g57" event={"ID":"043cbac7-0f97-43ed-a287-4af89ceaf905","Type":"ContainerStarted","Data":"b678aff0599ee8b3b6df5ac6841bc4195356c96030adbfc5ebbda07358886cf7"} Nov 25 14:40:18 crc kubenswrapper[4879]: I1125 14:40:18.219863 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45g57" event={"ID":"043cbac7-0f97-43ed-a287-4af89ceaf905","Type":"ContainerStarted","Data":"6607b1f0813ebd6281c8f456e90bc34de1868d08b3f614633b758985edfd9dd6"} Nov 25 14:40:18 crc kubenswrapper[4879]: I1125 14:40:18.219879 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-45g57" event={"ID":"043cbac7-0f97-43ed-a287-4af89ceaf905","Type":"ContainerStarted","Data":"203282d246bbde9d2eb68ff5ba6a2286034fc586c9e1fb52caa146225a76726b"} Nov 25 14:40:18 crc kubenswrapper[4879]: I1125 14:40:18.220714 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-45g57" Nov 25 14:40:18 crc kubenswrapper[4879]: I1125 14:40:18.235219 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-45g57" podStartSLOduration=5.235200891 podStartE2EDuration="5.235200891s" podCreationTimestamp="2025-11-25 14:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:40:18.233145223 +0000 UTC m=+909.836558294" watchObservedRunningTime="2025-11-25 14:40:18.235200891 +0000 UTC m=+909.838613962" Nov 25 14:40:23 crc kubenswrapper[4879]: I1125 14:40:23.247689 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" event={"ID":"6b2f76a4-9bca-41a3-9be6-48dd06986803","Type":"ContainerStarted","Data":"ae72404366d35c5ccc5fab5a20051eb52b84b308680004d0014ae3c5db54affb"} Nov 25 14:40:23 crc kubenswrapper[4879]: I1125 14:40:23.248479 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:23 crc kubenswrapper[4879]: I1125 14:40:23.266438 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerDied","Data":"1af437b4d0e5d4cfd297ab11d53fd2ad66455e09376185b56952edd0803c4c04"} Nov 25 14:40:23 crc kubenswrapper[4879]: I1125 14:40:23.266245 4879 generic.go:334] "Generic (PLEG): container finished" podID="b11c1717-f8f4-4838-acc9-7dc492d69268" containerID="1af437b4d0e5d4cfd297ab11d53fd2ad66455e09376185b56952edd0803c4c04" exitCode=0 Nov 25 14:40:23 crc kubenswrapper[4879]: I1125 14:40:23.276610 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" podStartSLOduration=1.791244418 
podStartE2EDuration="10.276584576s" podCreationTimestamp="2025-11-25 14:40:13 +0000 UTC" firstStartedPulling="2025-11-25 14:40:13.930720095 +0000 UTC m=+905.534133166" lastFinishedPulling="2025-11-25 14:40:22.416060253 +0000 UTC m=+914.019473324" observedRunningTime="2025-11-25 14:40:23.271524961 +0000 UTC m=+914.874938042" watchObservedRunningTime="2025-11-25 14:40:23.276584576 +0000 UTC m=+914.879997637" Nov 25 14:40:24 crc kubenswrapper[4879]: I1125 14:40:24.274189 4879 generic.go:334] "Generic (PLEG): container finished" podID="b11c1717-f8f4-4838-acc9-7dc492d69268" containerID="8cf68de87961806159cb2d68061b85b45023bb8e3bd6b3866800190207beaf09" exitCode=0 Nov 25 14:40:24 crc kubenswrapper[4879]: I1125 14:40:24.274374 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerDied","Data":"8cf68de87961806159cb2d68061b85b45023bb8e3bd6b3866800190207beaf09"} Nov 25 14:40:25 crc kubenswrapper[4879]: I1125 14:40:25.285865 4879 generic.go:334] "Generic (PLEG): container finished" podID="b11c1717-f8f4-4838-acc9-7dc492d69268" containerID="64b88ad526bed72554aa2ebb0092a4771728c82f191141bb4c7a13c3e87c7296" exitCode=0 Nov 25 14:40:25 crc kubenswrapper[4879]: I1125 14:40:25.285990 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerDied","Data":"64b88ad526bed72554aa2ebb0092a4771728c82f191141bb4c7a13c3e87c7296"} Nov 25 14:40:26 crc kubenswrapper[4879]: I1125 14:40:26.296935 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"22711da5714dfcafdf8fbdde8cf02d66a7c68ae79664f25b3ca6e2253c62762b"} Nov 25 14:40:26 crc kubenswrapper[4879]: I1125 14:40:26.297683 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"f748f84fef567d5730ebd979d3d67a62c7149119775aa454f96df3efe6d5928f"} Nov 25 14:40:26 crc kubenswrapper[4879]: I1125 14:40:26.297793 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"e4dcaa577c0f9c3e311dce6dc718b4d5961e28e1f49a4dc4b2114cbf42eaea25"} Nov 25 14:40:26 crc kubenswrapper[4879]: I1125 14:40:26.297855 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"b62c636650ac9d7fd298e7e6bef4f27f05a635f0a732ba0bcc369de2b0fd6ba8"} Nov 25 14:40:26 crc kubenswrapper[4879]: I1125 14:40:26.297914 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"1b46e78195263df02d71c14b8a7da57d241faaff15ee334c544bfb78c9b07817"} Nov 25 14:40:27 crc kubenswrapper[4879]: I1125 14:40:27.307690 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vx7w9" event={"ID":"b11c1717-f8f4-4838-acc9-7dc492d69268","Type":"ContainerStarted","Data":"32a814bdae604f2d82a18e1036104dddcc6a6ef7f77a2a49b553f695ffc79c6c"} Nov 25 14:40:27 crc kubenswrapper[4879]: I1125 14:40:27.307878 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:27 crc 
kubenswrapper[4879]: I1125 14:40:27.336027 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-vx7w9" podStartSLOduration=6.302747759 podStartE2EDuration="14.336000916s" podCreationTimestamp="2025-11-25 14:40:13 +0000 UTC" firstStartedPulling="2025-11-25 14:40:14.343185753 +0000 UTC m=+905.946598824" lastFinishedPulling="2025-11-25 14:40:22.37643891 +0000 UTC m=+913.979851981" observedRunningTime="2025-11-25 14:40:27.329114489 +0000 UTC m=+918.932527560" watchObservedRunningTime="2025-11-25 14:40:27.336000916 +0000 UTC m=+918.939413987" Nov 25 14:40:27 crc kubenswrapper[4879]: I1125 14:40:27.376200 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-45g57" Nov 25 14:40:28 crc kubenswrapper[4879]: I1125 14:40:28.955207 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:28 crc kubenswrapper[4879]: I1125 14:40:28.996504 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.070207 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd"] Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.071589 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.073834 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.088061 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd"] Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.216049 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2m49\" (UniqueName: \"kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.216198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.216239 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.317012 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.317066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.317157 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2m49\" (UniqueName: \"kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.317618 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.317742 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.343513 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2m49\" (UniqueName: \"kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.386622 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:29 crc kubenswrapper[4879]: I1125 14:40:29.588163 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd"] Nov 25 14:40:29 crc kubenswrapper[4879]: W1125 14:40:29.593570 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f9ead12_79a9_40a3_a327_7b281e40ff56.slice/crio-9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474 WatchSource:0}: Error finding container 9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474: Status 404 returned error can't find the container with id 9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474 Nov 25 14:40:30 crc kubenswrapper[4879]: I1125 14:40:30.324425 4879 generic.go:334] "Generic (PLEG): container finished" podID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerID="1b95134883abfa8169b47860c20c9bb97d1195f458f53d5af7a5085827cb6531" exitCode=0 Nov 25 14:40:30 crc kubenswrapper[4879]: I1125 14:40:30.324487 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" event={"ID":"5f9ead12-79a9-40a3-a327-7b281e40ff56","Type":"ContainerDied","Data":"1b95134883abfa8169b47860c20c9bb97d1195f458f53d5af7a5085827cb6531"} Nov 25 14:40:30 crc kubenswrapper[4879]: I1125 14:40:30.324751 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" event={"ID":"5f9ead12-79a9-40a3-a327-7b281e40ff56","Type":"ContainerStarted","Data":"9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474"} Nov 25 14:40:34 crc kubenswrapper[4879]: I1125 14:40:34.048691 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-29xht" Nov 25 14:40:34 crc kubenswrapper[4879]: I1125 14:40:34.428637 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-5rg2m" Nov 25 14:40:39 crc kubenswrapper[4879]: I1125 14:40:39.386060 4879 generic.go:334] "Generic (PLEG): container finished" podID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerID="a38b8f8fa06d38c6ba8da95e50b68d845fecff7f3eb1a1c94ca94db8f81b7858" exitCode=0 Nov 25 14:40:39 crc kubenswrapper[4879]: I1125 14:40:39.386704 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" event={"ID":"5f9ead12-79a9-40a3-a327-7b281e40ff56","Type":"ContainerDied","Data":"a38b8f8fa06d38c6ba8da95e50b68d845fecff7f3eb1a1c94ca94db8f81b7858"} Nov 25 14:40:40 crc kubenswrapper[4879]: I1125 14:40:40.396903 4879 generic.go:334] "Generic (PLEG): container finished" podID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerID="5a01809e9c9bb0790cf5a96113f09b54e13ae509ebb9938071e36f1c1638e573" exitCode=0 Nov 25 14:40:40 crc kubenswrapper[4879]: I1125 14:40:40.397282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" event={"ID":"5f9ead12-79a9-40a3-a327-7b281e40ff56","Type":"ContainerDied","Data":"5a01809e9c9bb0790cf5a96113f09b54e13ae509ebb9938071e36f1c1638e573"} Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.639137 4879 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.671453 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle\") pod \"5f9ead12-79a9-40a3-a327-7b281e40ff56\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.671524 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2m49\" (UniqueName: \"kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49\") pod \"5f9ead12-79a9-40a3-a327-7b281e40ff56\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.671558 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util\") pod \"5f9ead12-79a9-40a3-a327-7b281e40ff56\" (UID: \"5f9ead12-79a9-40a3-a327-7b281e40ff56\") " Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.673924 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle" (OuterVolumeSpecName: "bundle") pod "5f9ead12-79a9-40a3-a327-7b281e40ff56" (UID: "5f9ead12-79a9-40a3-a327-7b281e40ff56"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.678489 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49" (OuterVolumeSpecName: "kube-api-access-k2m49") pod "5f9ead12-79a9-40a3-a327-7b281e40ff56" (UID: "5f9ead12-79a9-40a3-a327-7b281e40ff56"). InnerVolumeSpecName "kube-api-access-k2m49". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.683241 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util" (OuterVolumeSpecName: "util") pod "5f9ead12-79a9-40a3-a327-7b281e40ff56" (UID: "5f9ead12-79a9-40a3-a327-7b281e40ff56"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.773227 4879 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.773259 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2m49\" (UniqueName: \"kubernetes.io/projected/5f9ead12-79a9-40a3-a327-7b281e40ff56-kube-api-access-k2m49\") on node \"crc\" DevicePath \"\"" Nov 25 14:40:41 crc kubenswrapper[4879]: I1125 14:40:41.773270 4879 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f9ead12-79a9-40a3-a327-7b281e40ff56-util\") on node \"crc\" DevicePath \"\"" Nov 25 14:40:42 crc kubenswrapper[4879]: I1125 14:40:42.408889 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" event={"ID":"5f9ead12-79a9-40a3-a327-7b281e40ff56","Type":"ContainerDied","Data":"9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474"} Nov 25 14:40:42 crc kubenswrapper[4879]: I1125 14:40:42.408938 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bacdf46438db980e5c2c95a7929a99bb5429ad777df54b2a771cf6ade15f474" Nov 25 14:40:42 crc kubenswrapper[4879]: I1125 14:40:42.409330 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd" Nov 25 14:40:43 crc kubenswrapper[4879]: I1125 14:40:43.961248 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-vx7w9" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.145333 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw"] Nov 25 14:40:56 crc kubenswrapper[4879]: E1125 14:40:56.146028 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="extract" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.146039 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="extract" Nov 25 14:40:56 crc kubenswrapper[4879]: E1125 14:40:56.146054 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="util" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.146060 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="util" Nov 25 14:40:56 crc kubenswrapper[4879]: E1125 14:40:56.146074 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="pull" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.146079 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="pull" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.146187 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f9ead12-79a9-40a3-a327-7b281e40ff56" containerName="extract" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.146597 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.149071 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.149342 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.149424 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-msmxj" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.166181 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw"] Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.181181 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.181373 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bgzl\" (UniqueName: \"kubernetes.io/projected/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-kube-api-access-9bgzl\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.282473 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bgzl\" (UniqueName: \"kubernetes.io/projected/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-kube-api-access-9bgzl\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.282581 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.283154 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.301572 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bgzl\" (UniqueName: \"kubernetes.io/projected/5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73-kube-api-access-9bgzl\") pod \"cert-manager-operator-controller-manager-64cf6dff88-sz7sw\" (UID: \"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.503912 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" Nov 25 14:40:56 crc kubenswrapper[4879]: I1125 14:40:56.737264 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw"] Nov 25 14:40:57 crc kubenswrapper[4879]: I1125 14:40:57.539636 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" event={"ID":"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73","Type":"ContainerStarted","Data":"eeb3c293010974dec35d1c9466b8087e3775784d7c9778564be5d40ee0424a43"} Nov 25 14:41:05 crc kubenswrapper[4879]: I1125 14:41:05.591323 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" event={"ID":"5bd8dad2-9a71-4bd0-a95a-f8b9e05a1b73","Type":"ContainerStarted","Data":"651e64feaa77e099d951c5a0aebf42d0d54c078e020919ef6f83840000e53889"} Nov 25 14:41:05 crc kubenswrapper[4879]: I1125 14:41:05.609285 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-sz7sw" podStartSLOduration=1.792200303 podStartE2EDuration="9.609264304s" podCreationTimestamp="2025-11-25 14:40:56 +0000 UTC" firstStartedPulling="2025-11-25 14:40:56.748524655 +0000 UTC m=+948.351937716" lastFinishedPulling="2025-11-25 14:41:04.565588646 +0000 UTC m=+956.169001717" observedRunningTime="2025-11-25 14:41:05.608507743 +0000 UTC m=+957.211920814" watchObservedRunningTime="2025-11-25 14:41:05.609264304 +0000 UTC m=+957.212677385" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.635998 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-gwv6b"] Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.636943 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.642078 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.642335 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-nxpcm" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.642440 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.652521 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-gwv6b"] Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.668380 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.668459 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh826\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-kube-api-access-lh826\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.769418 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.769763 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh826\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-kube-api-access-lh826\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.792415 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh826\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-kube-api-access-lh826\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.804271 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/99bc64d6-25b5-4e92-a4c7-4e744af93df6-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-gwv6b\" (UID: \"99bc64d6-25b5-4e92-a4c7-4e744af93df6\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:07 crc kubenswrapper[4879]: I1125 14:41:07.958686 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:08 crc kubenswrapper[4879]: I1125 14:41:08.564184 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-gwv6b"] Nov 25 14:41:08 crc kubenswrapper[4879]: I1125 14:41:08.614767 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" event={"ID":"99bc64d6-25b5-4e92-a4c7-4e744af93df6","Type":"ContainerStarted","Data":"85b520a56f0726fcecc44db435a956611aa82b68b0c0345229c407ad29ca0ddd"} Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.062277 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5"] Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.063244 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.068427 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-rdk4k" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.085303 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5"] Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.213049 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-929m2\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-kube-api-access-929m2\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.213153 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.315113 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.315274 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-929m2\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-kube-api-access-929m2\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.332636 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-929m2\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-kube-api-access-929m2\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.335116 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b39b6d7-9d19-4602-ad6e-c59a531818a8-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-bf2x5\" (UID: \"3b39b6d7-9d19-4602-ad6e-c59a531818a8\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.379340 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.613198 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5"] Nov 25 14:41:11 crc kubenswrapper[4879]: I1125 14:41:11.632531 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" event={"ID":"3b39b6d7-9d19-4602-ad6e-c59a531818a8","Type":"ContainerStarted","Data":"56d15003451838380aed66c415befc6fef8bdce395042c59748d72bb3af995bb"} Nov 25 14:41:17 crc kubenswrapper[4879]: I1125 14:41:17.408488 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:41:17 crc kubenswrapper[4879]: I1125 14:41:17.409015 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:41:20 crc kubenswrapper[4879]: I1125 14:41:20.999502 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-z6dzl"] Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.001527 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.006800 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-wkh6w" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.018071 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-z6dzl"] Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.185957 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7s96\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-kube-api-access-q7s96\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.186386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-bound-sa-token\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.287762 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-bound-sa-token\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.287839 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7s96\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-kube-api-access-q7s96\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.311384 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7s96\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-kube-api-access-q7s96\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.317164 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2df9d6c8-3f76-4f2d-b7ff-c4874549babd-bound-sa-token\") pod \"cert-manager-86cb77c54b-z6dzl\" (UID: \"2df9d6c8-3f76-4f2d-b7ff-c4874549babd\") " pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.326148 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.720288 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" event={"ID":"99bc64d6-25b5-4e92-a4c7-4e744af93df6","Type":"ContainerStarted","Data":"b94dbab8f4b6957655bda01fe32a450c1529fe55909b1a469c34aa12611062ff"} Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.720616 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.722561 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" event={"ID":"3b39b6d7-9d19-4602-ad6e-c59a531818a8","Type":"ContainerStarted","Data":"d4d89736cc943bb84edda7ac4b7eae64d0233104c931c5237f67e0440f2c10a8"} Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.791022 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" podStartSLOduration=2.365114226 podStartE2EDuration="14.790999255s" podCreationTimestamp="2025-11-25 14:41:07 +0000 UTC" firstStartedPulling="2025-11-25 14:41:08.572451433 +0000 UTC m=+960.175864504" lastFinishedPulling="2025-11-25 14:41:20.998336462 +0000 UTC m=+972.601749533" observedRunningTime="2025-11-25 14:41:21.788951166 +0000 UTC m=+973.392364237" watchObservedRunningTime="2025-11-25 14:41:21.790999255 +0000 UTC m=+973.394412326" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.811865 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" podStartSLOduration=1.423022891 podStartE2EDuration="10.81184419s" podCreationTimestamp="2025-11-25 14:41:11 +0000 UTC" firstStartedPulling="2025-11-25 14:41:11.623606506 +0000 UTC m=+963.227019577" lastFinishedPulling="2025-11-25 14:41:21.012427805 +0000 UTC m=+972.615840876" observedRunningTime="2025-11-25 14:41:21.807692762 +0000 UTC m=+973.411105843" watchObservedRunningTime="2025-11-25 14:41:21.81184419 +0000 UTC m=+973.415257261" Nov 25 14:41:21 crc kubenswrapper[4879]: I1125 14:41:21.820220 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-z6dzl"] Nov 25 14:41:21 crc kubenswrapper[4879]: W1125 14:41:21.826339 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2df9d6c8_3f76_4f2d_b7ff_c4874549babd.slice/crio-07d291c9f36893e9a70e81fcf8579db44830d57845ab4ac6fbcb6b71c2c2cf37 WatchSource:0}: Error finding container 07d291c9f36893e9a70e81fcf8579db44830d57845ab4ac6fbcb6b71c2c2cf37: Status 404 returned error can't find the container with id 07d291c9f36893e9a70e81fcf8579db44830d57845ab4ac6fbcb6b71c2c2cf37 Nov 25 14:41:22 crc kubenswrapper[4879]: I1125 14:41:22.734246 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" event={"ID":"2df9d6c8-3f76-4f2d-b7ff-c4874549babd","Type":"ContainerStarted","Data":"7bc07413625658bd1bc2f69508dbf81902d43a663017c4cbe509d4aae8decc02"} Nov 25 14:41:22 crc kubenswrapper[4879]: I1125 14:41:22.734314 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" event={"ID":"2df9d6c8-3f76-4f2d-b7ff-c4874549babd","Type":"ContainerStarted","Data":"07d291c9f36893e9a70e81fcf8579db44830d57845ab4ac6fbcb6b71c2c2cf37"} Nov 25 14:41:22 crc 
kubenswrapper[4879]: I1125 14:41:22.754541 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" podStartSLOduration=2.754523782 podStartE2EDuration="2.754523782s" podCreationTimestamp="2025-11-25 14:41:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:41:22.751394163 +0000 UTC m=+974.354807234" watchObservedRunningTime="2025-11-25 14:41:22.754523782 +0000 UTC m=+974.357936853" Nov 25 14:41:27 crc kubenswrapper[4879]: I1125 14:41:27.963178 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-gwv6b" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.468676 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.469868 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.472675 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qffvr" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.472689 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.489763 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.524114 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.615108 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfxhm\" (UniqueName: \"kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm\") pod \"openstack-operator-index-775x6\" (UID: \"e62fdb72-e253-4d60-91b3-e649ca0e514e\") " pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.717291 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfxhm\" (UniqueName: \"kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm\") pod \"openstack-operator-index-775x6\" (UID: \"e62fdb72-e253-4d60-91b3-e649ca0e514e\") " pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.737989 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfxhm\" (UniqueName: \"kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm\") pod \"openstack-operator-index-775x6\" (UID: \"e62fdb72-e253-4d60-91b3-e649ca0e514e\") " pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:31 crc kubenswrapper[4879]: I1125 14:41:31.824530 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:32 crc kubenswrapper[4879]: I1125 14:41:32.229021 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:32 crc kubenswrapper[4879]: W1125 14:41:32.233790 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode62fdb72_e253_4d60_91b3_e649ca0e514e.slice/crio-edadaff2f1e72ddb6300ff837476921ca5a7f104bb1278cd5a362e7122b029f7 WatchSource:0}: Error finding container edadaff2f1e72ddb6300ff837476921ca5a7f104bb1278cd5a362e7122b029f7: Status 404 returned error can't find the container with id edadaff2f1e72ddb6300ff837476921ca5a7f104bb1278cd5a362e7122b029f7 Nov 25 14:41:32 crc kubenswrapper[4879]: I1125 14:41:32.787874 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-775x6" event={"ID":"e62fdb72-e253-4d60-91b3-e649ca0e514e","Type":"ContainerStarted","Data":"edadaff2f1e72ddb6300ff837476921ca5a7f104bb1278cd5a362e7122b029f7"} Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.051702 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.659206 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-7nr95"] Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.660050 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.688093 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7nr95"] Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.774978 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsgxr\" (UniqueName: \"kubernetes.io/projected/df6e65ac-24ef-413b-87e9-09f1a13e9d60-kube-api-access-hsgxr\") pod \"openstack-operator-index-7nr95\" (UID: \"df6e65ac-24ef-413b-87e9-09f1a13e9d60\") " pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.877292 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsgxr\" (UniqueName: \"kubernetes.io/projected/df6e65ac-24ef-413b-87e9-09f1a13e9d60-kube-api-access-hsgxr\") pod \"openstack-operator-index-7nr95\" (UID: \"df6e65ac-24ef-413b-87e9-09f1a13e9d60\") " pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.898473 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsgxr\" (UniqueName: \"kubernetes.io/projected/df6e65ac-24ef-413b-87e9-09f1a13e9d60-kube-api-access-hsgxr\") pod \"openstack-operator-index-7nr95\" (UID: \"df6e65ac-24ef-413b-87e9-09f1a13e9d60\") " pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:34 crc kubenswrapper[4879]: I1125 14:41:34.980721 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:35 crc kubenswrapper[4879]: I1125 14:41:35.661964 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7nr95"] Nov 25 14:41:35 crc kubenswrapper[4879]: W1125 14:41:35.879288 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf6e65ac_24ef_413b_87e9_09f1a13e9d60.slice/crio-261e7eee7786a9faae48c5e24ea416bb457411593a8b2585621853caff1de711 WatchSource:0}: Error finding container 261e7eee7786a9faae48c5e24ea416bb457411593a8b2585621853caff1de711: Status 404 returned error can't find the container with id 261e7eee7786a9faae48c5e24ea416bb457411593a8b2585621853caff1de711 Nov 25 14:41:36 crc kubenswrapper[4879]: I1125 14:41:36.816218 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7nr95" event={"ID":"df6e65ac-24ef-413b-87e9-09f1a13e9d60","Type":"ContainerStarted","Data":"261e7eee7786a9faae48c5e24ea416bb457411593a8b2585621853caff1de711"} Nov 25 14:41:38 crc kubenswrapper[4879]: I1125 14:41:38.830854 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-775x6" event={"ID":"e62fdb72-e253-4d60-91b3-e649ca0e514e","Type":"ContainerStarted","Data":"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981"} Nov 25 14:41:38 crc kubenswrapper[4879]: I1125 14:41:38.830877 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-775x6" podUID="e62fdb72-e253-4d60-91b3-e649ca0e514e" containerName="registry-server" containerID="cri-o://4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981" gracePeriod=2 Nov 25 14:41:38 crc kubenswrapper[4879]: I1125 14:41:38.834271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7nr95" event={"ID":"df6e65ac-24ef-413b-87e9-09f1a13e9d60","Type":"ContainerStarted","Data":"0ccaaf2d6d0f896ec578b6c5ea1f26c42e54d1299e4a59e228796c86532251ec"} Nov 25 14:41:38 crc kubenswrapper[4879]: I1125 14:41:38.869596 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-7nr95" podStartSLOduration=2.828444407 podStartE2EDuration="4.869580494s" podCreationTimestamp="2025-11-25 14:41:34 +0000 UTC" firstStartedPulling="2025-11-25 14:41:35.881116884 +0000 UTC m=+987.484529975" lastFinishedPulling="2025-11-25 14:41:37.922252991 +0000 UTC m=+989.525666062" observedRunningTime="2025-11-25 14:41:38.868885745 +0000 UTC m=+990.472298816" watchObservedRunningTime="2025-11-25 14:41:38.869580494 +0000 UTC m=+990.472993565" Nov 25 14:41:38 crc kubenswrapper[4879]: I1125 14:41:38.870334 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-775x6" podStartSLOduration=2.188315096 podStartE2EDuration="7.870326126s" podCreationTimestamp="2025-11-25 14:41:31 +0000 UTC" firstStartedPulling="2025-11-25 14:41:32.235616178 +0000 UTC m=+983.839029249" lastFinishedPulling="2025-11-25 14:41:37.917627208 +0000 UTC m=+989.521040279" observedRunningTime="2025-11-25 14:41:38.850849979 +0000 UTC m=+990.454263050" watchObservedRunningTime="2025-11-25 14:41:38.870326126 +0000 UTC m=+990.473739197" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.259473 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.437053 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfxhm\" (UniqueName: \"kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm\") pod \"e62fdb72-e253-4d60-91b3-e649ca0e514e\" (UID: \"e62fdb72-e253-4d60-91b3-e649ca0e514e\") " Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.444289 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm" (OuterVolumeSpecName: "kube-api-access-pfxhm") pod "e62fdb72-e253-4d60-91b3-e649ca0e514e" (UID: "e62fdb72-e253-4d60-91b3-e649ca0e514e"). InnerVolumeSpecName "kube-api-access-pfxhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.538433 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfxhm\" (UniqueName: \"kubernetes.io/projected/e62fdb72-e253-4d60-91b3-e649ca0e514e-kube-api-access-pfxhm\") on node \"crc\" DevicePath \"\"" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.841628 4879 generic.go:334] "Generic (PLEG): container finished" podID="e62fdb72-e253-4d60-91b3-e649ca0e514e" containerID="4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981" exitCode=0 Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.841998 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-775x6" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.842383 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-775x6" event={"ID":"e62fdb72-e253-4d60-91b3-e649ca0e514e","Type":"ContainerDied","Data":"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981"} Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.842420 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-775x6" event={"ID":"e62fdb72-e253-4d60-91b3-e649ca0e514e","Type":"ContainerDied","Data":"edadaff2f1e72ddb6300ff837476921ca5a7f104bb1278cd5a362e7122b029f7"} Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.842442 4879 scope.go:117] "RemoveContainer" containerID="4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.859394 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.863023 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-775x6"] Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.867064 4879 scope.go:117] "RemoveContainer" containerID="4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981" Nov 25 14:41:39 crc kubenswrapper[4879]: E1125 14:41:39.867426 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981\": container with ID starting with 4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981 not found: ID does not exist" containerID="4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981" Nov 25 14:41:39 crc kubenswrapper[4879]: I1125 14:41:39.867458 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981"} err="failed to get container status \"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981\": rpc error: code = NotFound desc = could not find container \"4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981\": container with ID starting with 4590ce8c1bbe8ab6f821d81f2e51c3d21d5cb30d9748517f3c32e61108358981 not found: ID does not exist" Nov 25 14:41:41 crc kubenswrapper[4879]: I1125 14:41:41.654399 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e62fdb72-e253-4d60-91b3-e649ca0e514e" path="/var/lib/kubelet/pods/e62fdb72-e253-4d60-91b3-e649ca0e514e/volumes" Nov 25 14:41:44 crc kubenswrapper[4879]: I1125 14:41:44.981803 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:44 crc kubenswrapper[4879]: I1125 14:41:44.982124 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:45 crc kubenswrapper[4879]: I1125 14:41:45.006372 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:45 crc kubenswrapper[4879]: I1125 14:41:45.902833 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-7nr95" Nov 25 14:41:47 crc kubenswrapper[4879]: I1125 14:41:47.409553 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:41:47 crc kubenswrapper[4879]: I1125 14:41:47.409635 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.285674 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml"] Nov 25 14:41:54 crc kubenswrapper[4879]: E1125 14:41:54.287227 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e62fdb72-e253-4d60-91b3-e649ca0e514e" containerName="registry-server" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.287327 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e62fdb72-e253-4d60-91b3-e649ca0e514e" containerName="registry-server" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.287499 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e62fdb72-e253-4d60-91b3-e649ca0e514e" containerName="registry-server" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.288355 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.290903 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-2ffcz" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.295947 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml"] Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.337953 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.338009 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w68tk\" (UniqueName: \"kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.338047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.439403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.439457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w68tk\" (UniqueName: \"kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.439496 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.439933 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.440010 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.460565 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w68tk\" (UniqueName: \"kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk\") pod \"e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:54 crc kubenswrapper[4879]: I1125 14:41:54.646823 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:41:55 crc kubenswrapper[4879]: I1125 14:41:55.027615 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml"] Nov 25 14:41:55 crc kubenswrapper[4879]: W1125 14:41:55.034265 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecdbbc60_230b_4da2_a5bb_0623c9d6e31f.slice/crio-0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057 WatchSource:0}: Error finding container 0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057: Status 404 returned error can't find the container with id 0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057 Nov 25 14:41:55 crc kubenswrapper[4879]: I1125 14:41:55.935681 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" event={"ID":"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f","Type":"ContainerStarted","Data":"0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057"} Nov 25 14:41:58 crc kubenswrapper[4879]: I1125 14:41:58.955638 4879 generic.go:334] "Generic (PLEG): container finished" podID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerID="e790804c52456c4837756430af28c5baf73048f52ce055b2c947c511fd6eaf55" exitCode=0 Nov 25 14:41:58 crc kubenswrapper[4879]: I1125 14:41:58.955697 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" event={"ID":"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f","Type":"ContainerDied","Data":"e790804c52456c4837756430af28c5baf73048f52ce055b2c947c511fd6eaf55"} Nov 25 14:42:00 crc kubenswrapper[4879]: I1125 14:42:00.971461 4879 generic.go:334] "Generic (PLEG): container finished" podID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerID="fcf55a5a6a01f5d025dd20722c3a1af8d4176d02e4233cb99328df4266e2b7f5" exitCode=0 Nov 25 14:42:00 crc kubenswrapper[4879]: I1125 14:42:00.971600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" event={"ID":"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f","Type":"ContainerDied","Data":"fcf55a5a6a01f5d025dd20722c3a1af8d4176d02e4233cb99328df4266e2b7f5"} Nov 25 14:42:01 crc kubenswrapper[4879]: I1125 14:42:01.980253 4879 generic.go:334] "Generic (PLEG): container finished" podID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerID="a2169a733aa3d6599c9f691db70e823e8445f928f5ada5cbf96115efe0417d60" exitCode=0 Nov 25 14:42:01 crc kubenswrapper[4879]: I1125 14:42:01.980321 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" event={"ID":"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f","Type":"ContainerDied","Data":"a2169a733aa3d6599c9f691db70e823e8445f928f5ada5cbf96115efe0417d60"} Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.238359 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.259075 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle\") pod \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.259160 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w68tk\" (UniqueName: \"kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk\") pod \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.259220 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util\") pod \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\" (UID: \"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f\") " Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.260841 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle" (OuterVolumeSpecName: "bundle") pod "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" (UID: "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.266681 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk" (OuterVolumeSpecName: "kube-api-access-w68tk") pod "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" (UID: "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f"). InnerVolumeSpecName "kube-api-access-w68tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.276218 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util" (OuterVolumeSpecName: "util") pod "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" (UID: "ecdbbc60-230b-4da2-a5bb-0623c9d6e31f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.360569 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w68tk\" (UniqueName: \"kubernetes.io/projected/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-kube-api-access-w68tk\") on node \"crc\" DevicePath \"\"" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.360610 4879 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-util\") on node \"crc\" DevicePath \"\"" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.360623 4879 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ecdbbc60-230b-4da2-a5bb-0623c9d6e31f-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.994577 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" event={"ID":"ecdbbc60-230b-4da2-a5bb-0623c9d6e31f","Type":"ContainerDied","Data":"0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057"} Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.994914 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d2cd48fe7ca770b22d2d18b96370ebafdf249803e9c5adf36229b8fae67a057" Nov 25 14:42:03 crc kubenswrapper[4879]: I1125 14:42:03.994989 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513174 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt"] Nov 25 14:42:06 crc kubenswrapper[4879]: E1125 14:42:06.513422 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="extract" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513436 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="extract" Nov 25 14:42:06 crc kubenswrapper[4879]: E1125 14:42:06.513452 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="util" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513458 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="util" Nov 25 14:42:06 crc kubenswrapper[4879]: E1125 14:42:06.513473 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="pull" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513482 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="pull" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513606 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecdbbc60-230b-4da2-a5bb-0623c9d6e31f" containerName="extract" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.513992 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.516111 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6tvg8" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.627115 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt"] Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.706253 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lb9d\" (UniqueName: \"kubernetes.io/projected/5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24-kube-api-access-8lb9d\") pod \"openstack-operator-controller-operator-5fd4b8b4b5-64rqt\" (UID: \"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24\") " pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.809171 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lb9d\" (UniqueName: \"kubernetes.io/projected/5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24-kube-api-access-8lb9d\") pod \"openstack-operator-controller-operator-5fd4b8b4b5-64rqt\" (UID: \"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24\") " pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:06 crc kubenswrapper[4879]: I1125 14:42:06.844258 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lb9d\" (UniqueName: \"kubernetes.io/projected/5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24-kube-api-access-8lb9d\") pod \"openstack-operator-controller-operator-5fd4b8b4b5-64rqt\" (UID: \"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24\") " pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:07 crc kubenswrapper[4879]: I1125 14:42:07.135604 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:07 crc kubenswrapper[4879]: I1125 14:42:07.365824 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt"] Nov 25 14:42:08 crc kubenswrapper[4879]: I1125 14:42:08.016665 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" event={"ID":"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24","Type":"ContainerStarted","Data":"96fa430c26b0dfef56c30bc9e6fdb026207bfec3d23848e84d8847dc00535948"} Nov 25 14:42:16 crc kubenswrapper[4879]: I1125 14:42:16.071134 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" event={"ID":"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24","Type":"ContainerStarted","Data":"d6be5beba992af8d235932b1fe4bdd42a1b7688f7ee2c5b82ae044ba7780f8d9"} Nov 25 14:42:16 crc kubenswrapper[4879]: I1125 14:42:16.071725 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:17 crc kubenswrapper[4879]: I1125 14:42:17.409452 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:42:17 crc kubenswrapper[4879]: I1125 14:42:17.409574 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:42:17 crc kubenswrapper[4879]: I1125 14:42:17.409648 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:42:17 crc kubenswrapper[4879]: I1125 14:42:17.410690 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:42:17 crc kubenswrapper[4879]: I1125 14:42:17.410791 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef" gracePeriod=600 Nov 25 14:42:18 crc kubenswrapper[4879]: I1125 14:42:18.085163 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef" exitCode=0 Nov 25 14:42:18 crc kubenswrapper[4879]: I1125 14:42:18.085185 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef"} Nov 25 14:42:18 crc kubenswrapper[4879]: I1125 14:42:18.085551 4879 scope.go:117] "RemoveContainer" containerID="10f38710f0a2009721f3eed24465e5c8e6ced367d4a502ca16764d82d8c5b0b2" Nov 25 14:42:19 crc kubenswrapper[4879]: I1125 14:42:19.092918 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7"} Nov 25 14:42:19 crc kubenswrapper[4879]: I1125 14:42:19.111632 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" podStartSLOduration=5.386512673 podStartE2EDuration="13.111611243s" podCreationTimestamp="2025-11-25 14:42:06 +0000 UTC" firstStartedPulling="2025-11-25 14:42:07.375863462 +0000 UTC m=+1018.979276533" lastFinishedPulling="2025-11-25 14:42:15.100962032 +0000 UTC m=+1026.704375103" observedRunningTime="2025-11-25 14:42:16.10042093 +0000 UTC m=+1027.703834001" watchObservedRunningTime="2025-11-25 14:42:19.111611243 +0000 UTC m=+1030.715024314" Nov 25 14:42:27 crc kubenswrapper[4879]: I1125 14:42:27.138323 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.307267 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.321884 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.324827 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-q77vv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.362134 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.362189 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.362980 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.363633 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.364008 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.367248 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6d4l7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.371536 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-pxcqp" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.385059 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.385129 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.390169 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdfnw\" (UniqueName: \"kubernetes.io/projected/461b714a-4ee7-40ab-99d3-cd78552b52c6-kube-api-access-rdfnw\") pod \"cinder-operator-controller-manager-79856dc55c-sl6x7\" (UID: \"461b714a-4ee7-40ab-99d3-cd78552b52c6\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.396485 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.397499 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.397892 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.400332 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-czl25" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.400643 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-v8tjg" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.404161 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.409168 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.429258 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.466976 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.468364 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.471972 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-qc22t" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.477788 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.479017 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.480449 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-tkcc8" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.486628 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.488226 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.497934 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdfnw\" (UniqueName: \"kubernetes.io/projected/461b714a-4ee7-40ab-99d3-cd78552b52c6-kube-api-access-rdfnw\") pod \"cinder-operator-controller-manager-79856dc55c-sl6x7\" (UID: \"461b714a-4ee7-40ab-99d3-cd78552b52c6\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.498025 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk6lf\" (UniqueName: \"kubernetes.io/projected/230849f3-daef-4f23-9839-8f0bd76d8e4a-kube-api-access-mk6lf\") pod \"designate-operator-controller-manager-7d695c9b56-4z42w\" (UID: \"230849f3-daef-4f23-9839-8f0bd76d8e4a\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.498047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kmhl\" (UniqueName: \"kubernetes.io/projected/a8b8b0f7-f988-46b1-b88f-751261b1c6a1-kube-api-access-6kmhl\") pod \"barbican-operator-controller-manager-86dc4d89c8-vf4mk\" (UID: \"a8b8b0f7-f988-46b1-b88f-751261b1c6a1\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.498223 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.501785 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mxvzg" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.527512 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.537729 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdfnw\" (UniqueName: 
\"kubernetes.io/projected/461b714a-4ee7-40ab-99d3-cd78552b52c6-kube-api-access-rdfnw\") pod \"cinder-operator-controller-manager-79856dc55c-sl6x7\" (UID: \"461b714a-4ee7-40ab-99d3-cd78552b52c6\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.560115 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.586170 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.598972 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29cv9\" (UniqueName: \"kubernetes.io/projected/0c0a5e22-8150-48b6-9b4f-a9b18bb4960f-kube-api-access-29cv9\") pod \"heat-operator-controller-manager-774b86978c-w8cc5\" (UID: \"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.599026 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cth5\" (UniqueName: \"kubernetes.io/projected/be0d238d-5b08-42e1-ac21-4e00592ab433-kube-api-access-6cth5\") pod \"ironic-operator-controller-manager-5bfcdc958c-8tgzv\" (UID: \"be0d238d-5b08-42e1-ac21-4e00592ab433\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.599052 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44r6s\" (UniqueName: \"kubernetes.io/projected/afbf9f55-3316-40bb-b53b-d4d96482f9d5-kube-api-access-44r6s\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.599068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6zb8\" (UniqueName: \"kubernetes.io/projected/cefe5024-a03a-427e-84a5-a4f6eac64f12-kube-api-access-p6zb8\") pod \"horizon-operator-controller-manager-68c9694994-ltnmm\" (UID: \"cefe5024-a03a-427e-84a5-a4f6eac64f12\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.599086 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.599109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vztd\" (UniqueName: \"kubernetes.io/projected/e9e99ec6-68ec-4d48-847b-b5f350dc1fc4-kube-api-access-5vztd\") pod \"glance-operator-controller-manager-68b95954c9-gb5r4\" (UID: \"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4\") " pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.606830 
4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk6lf\" (UniqueName: \"kubernetes.io/projected/230849f3-daef-4f23-9839-8f0bd76d8e4a-kube-api-access-mk6lf\") pod \"designate-operator-controller-manager-7d695c9b56-4z42w\" (UID: \"230849f3-daef-4f23-9839-8f0bd76d8e4a\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.607191 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kmhl\" (UniqueName: \"kubernetes.io/projected/a8b8b0f7-f988-46b1-b88f-751261b1c6a1-kube-api-access-6kmhl\") pod \"barbican-operator-controller-manager-86dc4d89c8-vf4mk\" (UID: \"a8b8b0f7-f988-46b1-b88f-751261b1c6a1\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.608296 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.609281 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.617564 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bl9x8" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.618244 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.618299 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.619353 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.621874 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-d8nb7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.636887 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kmhl\" (UniqueName: \"kubernetes.io/projected/a8b8b0f7-f988-46b1-b88f-751261b1c6a1-kube-api-access-6kmhl\") pod \"barbican-operator-controller-manager-86dc4d89c8-vf4mk\" (UID: \"a8b8b0f7-f988-46b1-b88f-751261b1c6a1\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.655098 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk6lf\" (UniqueName: \"kubernetes.io/projected/230849f3-daef-4f23-9839-8f0bd76d8e4a-kube-api-access-mk6lf\") pod \"designate-operator-controller-manager-7d695c9b56-4z42w\" (UID: \"230849f3-daef-4f23-9839-8f0bd76d8e4a\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.668214 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.669578 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.678597 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-ptrrw" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.687059 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.693663 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.703990 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707847 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29cv9\" (UniqueName: \"kubernetes.io/projected/0c0a5e22-8150-48b6-9b4f-a9b18bb4960f-kube-api-access-29cv9\") pod \"heat-operator-controller-manager-774b86978c-w8cc5\" (UID: \"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707897 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cth5\" (UniqueName: \"kubernetes.io/projected/be0d238d-5b08-42e1-ac21-4e00592ab433-kube-api-access-6cth5\") pod \"ironic-operator-controller-manager-5bfcdc958c-8tgzv\" (UID: \"be0d238d-5b08-42e1-ac21-4e00592ab433\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707924 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44r6s\" (UniqueName: \"kubernetes.io/projected/afbf9f55-3316-40bb-b53b-d4d96482f9d5-kube-api-access-44r6s\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6zb8\" (UniqueName: \"kubernetes.io/projected/cefe5024-a03a-427e-84a5-a4f6eac64f12-kube-api-access-p6zb8\") pod \"horizon-operator-controller-manager-68c9694994-ltnmm\" (UID: \"cefe5024-a03a-427e-84a5-a4f6eac64f12\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707958 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.707979 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vztd\" (UniqueName: \"kubernetes.io/projected/e9e99ec6-68ec-4d48-847b-b5f350dc1fc4-kube-api-access-5vztd\") pod \"glance-operator-controller-manager-68b95954c9-gb5r4\" (UID: \"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4\") " 
pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.708516 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:42:51 crc kubenswrapper[4879]: E1125 14:42:51.708931 4879 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 25 14:42:51 crc kubenswrapper[4879]: E1125 14:42:51.708971 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert podName:afbf9f55-3316-40bb-b53b-d4d96482f9d5 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:52.208956348 +0000 UTC m=+1063.812369419 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert") pod "infra-operator-controller-manager-d5cc86f4b-8tdb9" (UID: "afbf9f55-3316-40bb-b53b-d4d96482f9d5") : secret "infra-operator-webhook-server-cert" not found Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.712360 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-dfrxk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.717815 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.726192 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.737768 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44r6s\" (UniqueName: \"kubernetes.io/projected/afbf9f55-3316-40bb-b53b-d4d96482f9d5-kube-api-access-44r6s\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.750285 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.750752 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.760745 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6zb8\" (UniqueName: \"kubernetes.io/projected/cefe5024-a03a-427e-84a5-a4f6eac64f12-kube-api-access-p6zb8\") pod \"horizon-operator-controller-manager-68c9694994-ltnmm\" (UID: \"cefe5024-a03a-427e-84a5-a4f6eac64f12\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.761422 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vztd\" (UniqueName: \"kubernetes.io/projected/e9e99ec6-68ec-4d48-847b-b5f350dc1fc4-kube-api-access-5vztd\") pod \"glance-operator-controller-manager-68b95954c9-gb5r4\" (UID: \"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4\") " pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.765795 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29cv9\" (UniqueName: \"kubernetes.io/projected/0c0a5e22-8150-48b6-9b4f-a9b18bb4960f-kube-api-access-29cv9\") pod \"heat-operator-controller-manager-774b86978c-w8cc5\" (UID: \"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.766337 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cth5\" (UniqueName: \"kubernetes.io/projected/be0d238d-5b08-42e1-ac21-4e00592ab433-kube-api-access-6cth5\") pod \"ironic-operator-controller-manager-5bfcdc958c-8tgzv\" (UID: \"be0d238d-5b08-42e1-ac21-4e00592ab433\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.770752 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.771747 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.772485 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.774426 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-nthgs" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.775924 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.778260 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.787607 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.791656 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.798903 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.805018 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-txg5q" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.827331 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjntg\" (UniqueName: \"kubernetes.io/projected/5c199b9d-786f-4520-a7bb-67f616b16b88-kube-api-access-bjntg\") pod \"octavia-operator-controller-manager-fd75fd47d-txbsq\" (UID: \"5c199b9d-786f-4520-a7bb-67f616b16b88\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.827427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdw6w\" (UniqueName: \"kubernetes.io/projected/8f91f389-91ad-4a56-9e71-5cf7bb88db01-kube-api-access-mdw6w\") pod \"neutron-operator-controller-manager-7c57c8bbc4-7mc5t\" (UID: \"8f91f389-91ad-4a56-9e71-5cf7bb88db01\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.827614 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtgzm\" (UniqueName: \"kubernetes.io/projected/89a61837-ab76-494d-a98d-268fed9bbe35-kube-api-access-mtgzm\") pod \"keystone-operator-controller-manager-748dc6576f-5rjs9\" (UID: \"89a61837-ab76-494d-a98d-268fed9bbe35\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.827704 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg7pp\" (UniqueName: \"kubernetes.io/projected/c5c5776f-3970-425a-b5a7-c4c859f821e0-kube-api-access-pg7pp\") pod \"nova-operator-controller-manager-79556f57fc-mq9fj\" (UID: \"c5c5776f-3970-425a-b5a7-c4c859f821e0\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.827829 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr8rv\" (UniqueName: \"kubernetes.io/projected/279425db-228b-4697-864f-e50d2eb66012-kube-api-access-fr8rv\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-95pqd\" (UID: \"279425db-228b-4697-864f-e50d2eb66012\") " pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.828022 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7rmn\" (UniqueName: \"kubernetes.io/projected/bac13c1a-af96-4cb0-a802-ef2086f9f06b-kube-api-access-d7rmn\") pod \"manila-operator-controller-manager-58bb8d67cc-m7d57\" (UID: \"bac13c1a-af96-4cb0-a802-ef2086f9f06b\") " 
pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.830373 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.836505 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.887455 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.909746 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.910603 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.918149 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-gtl9r" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.920249 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.920670 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930232 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58nn7\" (UniqueName: \"kubernetes.io/projected/2e38fffb-d3ee-488d-bbc7-811d4ba43797-kube-api-access-58nn7\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930520 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtgzm\" (UniqueName: \"kubernetes.io/projected/89a61837-ab76-494d-a98d-268fed9bbe35-kube-api-access-mtgzm\") pod \"keystone-operator-controller-manager-748dc6576f-5rjs9\" (UID: \"89a61837-ab76-494d-a98d-268fed9bbe35\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930628 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg7pp\" (UniqueName: \"kubernetes.io/projected/c5c5776f-3970-425a-b5a7-c4c859f821e0-kube-api-access-pg7pp\") pod \"nova-operator-controller-manager-79556f57fc-mq9fj\" (UID: \"c5c5776f-3970-425a-b5a7-c4c859f821e0\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930709 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr8rv\" (UniqueName: \"kubernetes.io/projected/279425db-228b-4697-864f-e50d2eb66012-kube-api-access-fr8rv\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-95pqd\" (UID: \"279425db-228b-4697-864f-e50d2eb66012\") " 
pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930829 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5qmk\" (UniqueName: \"kubernetes.io/projected/94c3a712-5baa-4789-ad81-8d4c0554d84b-kube-api-access-k5qmk\") pod \"ovn-operator-controller-manager-66cf5c67ff-pr6k5\" (UID: \"94c3a712-5baa-4789-ad81-8d4c0554d84b\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.930973 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7rmn\" (UniqueName: \"kubernetes.io/projected/bac13c1a-af96-4cb0-a802-ef2086f9f06b-kube-api-access-d7rmn\") pod \"manila-operator-controller-manager-58bb8d67cc-m7d57\" (UID: \"bac13c1a-af96-4cb0-a802-ef2086f9f06b\") " pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.931143 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjntg\" (UniqueName: \"kubernetes.io/projected/5c199b9d-786f-4520-a7bb-67f616b16b88-kube-api-access-bjntg\") pod \"octavia-operator-controller-manager-fd75fd47d-txbsq\" (UID: \"5c199b9d-786f-4520-a7bb-67f616b16b88\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.932632 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdw6w\" (UniqueName: \"kubernetes.io/projected/8f91f389-91ad-4a56-9e71-5cf7bb88db01-kube-api-access-mdw6w\") pod \"neutron-operator-controller-manager-7c57c8bbc4-7mc5t\" (UID: \"8f91f389-91ad-4a56-9e71-5cf7bb88db01\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.932795 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.933861 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.936224 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.938080 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.939584 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.939874 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-j45bs" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.940850 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-nz7qk" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.957185 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.961519 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x"] Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.971923 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg7pp\" (UniqueName: \"kubernetes.io/projected/c5c5776f-3970-425a-b5a7-c4c859f821e0-kube-api-access-pg7pp\") pod \"nova-operator-controller-manager-79556f57fc-mq9fj\" (UID: \"c5c5776f-3970-425a-b5a7-c4c859f821e0\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.978930 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.981399 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-cn7hl" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.985913 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr8rv\" (UniqueName: \"kubernetes.io/projected/279425db-228b-4697-864f-e50d2eb66012-kube-api-access-fr8rv\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-95pqd\" (UID: \"279425db-228b-4697-864f-e50d2eb66012\") " pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.987016 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdw6w\" (UniqueName: \"kubernetes.io/projected/8f91f389-91ad-4a56-9e71-5cf7bb88db01-kube-api-access-mdw6w\") pod \"neutron-operator-controller-manager-7c57c8bbc4-7mc5t\" (UID: \"8f91f389-91ad-4a56-9e71-5cf7bb88db01\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.989918 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7rmn\" (UniqueName: \"kubernetes.io/projected/bac13c1a-af96-4cb0-a802-ef2086f9f06b-kube-api-access-d7rmn\") pod \"manila-operator-controller-manager-58bb8d67cc-m7d57\" (UID: \"bac13c1a-af96-4cb0-a802-ef2086f9f06b\") " pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:42:51 crc kubenswrapper[4879]: I1125 14:42:51.992694 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:51.998486 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjntg\" (UniqueName: \"kubernetes.io/projected/5c199b9d-786f-4520-a7bb-67f616b16b88-kube-api-access-bjntg\") pod \"octavia-operator-controller-manager-fd75fd47d-txbsq\" (UID: \"5c199b9d-786f-4520-a7bb-67f616b16b88\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.007676 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.009163 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.013026 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-48gsd" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.013569 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.028288 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtgzm\" (UniqueName: \"kubernetes.io/projected/89a61837-ab76-494d-a98d-268fed9bbe35-kube-api-access-mtgzm\") pod \"keystone-operator-controller-manager-748dc6576f-5rjs9\" (UID: \"89a61837-ab76-494d-a98d-268fed9bbe35\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.035939 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.036001 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58nn7\" (UniqueName: \"kubernetes.io/projected/2e38fffb-d3ee-488d-bbc7-811d4ba43797-kube-api-access-58nn7\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.036073 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5qmk\" (UniqueName: \"kubernetes.io/projected/94c3a712-5baa-4789-ad81-8d4c0554d84b-kube-api-access-k5qmk\") pod \"ovn-operator-controller-manager-66cf5c67ff-pr6k5\" (UID: \"94c3a712-5baa-4789-ad81-8d4c0554d84b\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.036534 4879 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.036593 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert podName:2e38fffb-d3ee-488d-bbc7-811d4ba43797 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:52.536576735 +0000 UTC m=+1064.139989796 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert") pod "openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" (UID: "2e38fffb-d3ee-488d-bbc7-811d4ba43797") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.036624 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.068953 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.073731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58nn7\" (UniqueName: \"kubernetes.io/projected/2e38fffb-d3ee-488d-bbc7-811d4ba43797-kube-api-access-58nn7\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.081633 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5qmk\" (UniqueName: \"kubernetes.io/projected/94c3a712-5baa-4789-ad81-8d4c0554d84b-kube-api-access-k5qmk\") pod \"ovn-operator-controller-manager-66cf5c67ff-pr6k5\" (UID: \"94c3a712-5baa-4789-ad81-8d4c0554d84b\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.091778 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-52qvx"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.101241 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.107230 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-52qvx"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.117268 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mp6bw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.129967 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.136177 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-864885998-vnr67"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.137068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv4lq\" (UniqueName: \"kubernetes.io/projected/4ca6d024-306e-4707-abb0-1b57ed1e11b6-kube-api-access-bv4lq\") pod \"placement-operator-controller-manager-5db546f9d9-nk7tw\" (UID: \"4ca6d024-306e-4707-abb0-1b57ed1e11b6\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.137189 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnsnv\" (UniqueName: \"kubernetes.io/projected/2037b3b9-3099-4f88-8e56-ec28ee25efa5-kube-api-access-mnsnv\") pod \"telemetry-operator-controller-manager-567f98c9d-zvb94\" (UID: \"2037b3b9-3099-4f88-8e56-ec28ee25efa5\") " pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.137233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-428kr\" (UniqueName: \"kubernetes.io/projected/81a1e752-3477-4e08-b151-874b0e503a1b-kube-api-access-428kr\") pod \"swift-operator-controller-manager-6fdc4fcf86-pf28x\" (UID: \"81a1e752-3477-4e08-b151-874b0e503a1b\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.137478 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.140733 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.143151 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5t2l2" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.144063 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-864885998-vnr67"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.150966 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.203204 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.204214 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.209868 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.210181 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.210328 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-c2rkz" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.218478 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.242176 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.243267 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.243837 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.243882 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw2q8\" (UniqueName: \"kubernetes.io/projected/9d5bb254-3519-4805-bbfa-c4fad026bec1-kube-api-access-mw2q8\") pod \"test-operator-controller-manager-5cb74df96-52qvx\" (UID: \"9d5bb254-3519-4805-bbfa-c4fad026bec1\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.243933 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv4lq\" (UniqueName: \"kubernetes.io/projected/4ca6d024-306e-4707-abb0-1b57ed1e11b6-kube-api-access-bv4lq\") pod \"placement-operator-controller-manager-5db546f9d9-nk7tw\" (UID: \"4ca6d024-306e-4707-abb0-1b57ed1e11b6\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.244019 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb628\" (UniqueName: \"kubernetes.io/projected/06ee2ae7-d534-4170-9862-53a2580c39ce-kube-api-access-tb628\") pod \"watcher-operator-controller-manager-864885998-vnr67\" (UID: \"06ee2ae7-d534-4170-9862-53a2580c39ce\") " pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.244083 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnsnv\" (UniqueName: \"kubernetes.io/projected/2037b3b9-3099-4f88-8e56-ec28ee25efa5-kube-api-access-mnsnv\") pod \"telemetry-operator-controller-manager-567f98c9d-zvb94\" (UID: \"2037b3b9-3099-4f88-8e56-ec28ee25efa5\") " 
pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.244154 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-428kr\" (UniqueName: \"kubernetes.io/projected/81a1e752-3477-4e08-b151-874b0e503a1b-kube-api-access-428kr\") pod \"swift-operator-controller-manager-6fdc4fcf86-pf28x\" (UID: \"81a1e752-3477-4e08-b151-874b0e503a1b\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.257198 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/afbf9f55-3316-40bb-b53b-d4d96482f9d5-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-8tdb9\" (UID: \"afbf9f55-3316-40bb-b53b-d4d96482f9d5\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.260191 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.260580 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-tgpxw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.260965 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.269133 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv4lq\" (UniqueName: \"kubernetes.io/projected/4ca6d024-306e-4707-abb0-1b57ed1e11b6-kube-api-access-bv4lq\") pod \"placement-operator-controller-manager-5db546f9d9-nk7tw\" (UID: \"4ca6d024-306e-4707-abb0-1b57ed1e11b6\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.282417 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.317414 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnsnv\" (UniqueName: \"kubernetes.io/projected/2037b3b9-3099-4f88-8e56-ec28ee25efa5-kube-api-access-mnsnv\") pod \"telemetry-operator-controller-manager-567f98c9d-zvb94\" (UID: \"2037b3b9-3099-4f88-8e56-ec28ee25efa5\") " pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.330174 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-428kr\" (UniqueName: \"kubernetes.io/projected/81a1e752-3477-4e08-b151-874b0e503a1b-kube-api-access-428kr\") pod \"swift-operator-controller-manager-6fdc4fcf86-pf28x\" (UID: \"81a1e752-3477-4e08-b151-874b0e503a1b\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.345050 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.346353 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw2q8\" (UniqueName: \"kubernetes.io/projected/9d5bb254-3519-4805-bbfa-c4fad026bec1-kube-api-access-mw2q8\") pod \"test-operator-controller-manager-5cb74df96-52qvx\" (UID: \"9d5bb254-3519-4805-bbfa-c4fad026bec1\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.357217 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpjq6\" (UniqueName: \"kubernetes.io/projected/b7d6b37e-0aff-4496-b240-7770d1d23827-kube-api-access-dpjq6\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.357338 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb628\" (UniqueName: \"kubernetes.io/projected/06ee2ae7-d534-4170-9862-53a2580c39ce-kube-api-access-tb628\") pod \"watcher-operator-controller-manager-864885998-vnr67\" (UID: \"06ee2ae7-d534-4170-9862-53a2580c39ce\") " pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.357408 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.357466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctnrl\" (UniqueName: \"kubernetes.io/projected/ee61acb4-f03b-4e5c-996c-3b4436b8e676-kube-api-access-ctnrl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zj2cr\" (UID: \"ee61acb4-f03b-4e5c-996c-3b4436b8e676\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.357642 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.371081 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.383895 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.391117 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw2q8\" (UniqueName: \"kubernetes.io/projected/9d5bb254-3519-4805-bbfa-c4fad026bec1-kube-api-access-mw2q8\") pod \"test-operator-controller-manager-5cb74df96-52qvx\" (UID: \"9d5bb254-3519-4805-bbfa-c4fad026bec1\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.397882 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.418761 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb628\" (UniqueName: \"kubernetes.io/projected/06ee2ae7-d534-4170-9862-53a2580c39ce-kube-api-access-tb628\") pod \"watcher-operator-controller-manager-864885998-vnr67\" (UID: \"06ee2ae7-d534-4170-9862-53a2580c39ce\") " pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.467290 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.467342 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctnrl\" (UniqueName: \"kubernetes.io/projected/ee61acb4-f03b-4e5c-996c-3b4436b8e676-kube-api-access-ctnrl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zj2cr\" (UID: \"ee61acb4-f03b-4e5c-996c-3b4436b8e676\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.467421 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.467491 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpjq6\" (UniqueName: \"kubernetes.io/projected/b7d6b37e-0aff-4496-b240-7770d1d23827-kube-api-access-dpjq6\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.467957 4879 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.468014 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. 
No retries permitted until 2025-11-25 14:42:52.967989558 +0000 UTC m=+1064.571402629 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.468424 4879 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.468466 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:52.968454891 +0000 UTC m=+1064.571867962 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "metrics-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.468816 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.480561 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.508866 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.519371 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpjq6\" (UniqueName: \"kubernetes.io/projected/b7d6b37e-0aff-4496-b240-7770d1d23827-kube-api-access-dpjq6\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.524999 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctnrl\" (UniqueName: \"kubernetes.io/projected/ee61acb4-f03b-4e5c-996c-3b4436b8e676-kube-api-access-ctnrl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zj2cr\" (UID: \"ee61acb4-f03b-4e5c-996c-3b4436b8e676\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.538075 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.568680 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.568867 4879 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.568940 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert podName:2e38fffb-d3ee-488d-bbc7-811d4ba43797 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:53.568918274 +0000 UTC m=+1065.172331345 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert") pod "openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" (UID: "2e38fffb-d3ee-488d-bbc7-811d4ba43797") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.583884 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.680574 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.728556 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5"] Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.974838 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: I1125 14:42:52.975231 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.975090 4879 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.975428 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:53.975412955 +0000 UTC m=+1065.578826026 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "metrics-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.975378 4879 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 25 14:42:52 crc kubenswrapper[4879]: E1125 14:42:52.975818 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:53.975809366 +0000 UTC m=+1065.579222437 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "webhook-server-cert" not found Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.319838 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerStarted","Data":"e52c70addbce0506f2a703a8e418ac1a8ab70e83b148bb29ab92cafbcb6cccc3"} Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.324092 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerStarted","Data":"9ebdd303728a68f586450c5726f3066b5c52305090e25a40d4ddc914c64fd60d"} Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.324910 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerStarted","Data":"0e9d2418b6c25757eda0a079180a5106f1fd4d28cc7c5ec762f21d578182cae7"} Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.584364 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.591058 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e38fffb-d3ee-488d-bbc7-811d4ba43797-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf\" (UID: \"2e38fffb-d3ee-488d-bbc7-811d4ba43797\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.725192 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.736548 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.748986 4879 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.781188 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:42:53 crc kubenswrapper[4879]: W1125 14:42:53.781452 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod230849f3_daef_4f23_9839_8f0bd76d8e4a.slice/crio-3093685a228e5e614c861abb7f113a74de447a1de109e11e13f4f605f9e52d87 WatchSource:0}: Error finding container 3093685a228e5e614c861abb7f113a74de447a1de109e11e13f4f605f9e52d87: Status 404 returned error can't find the container with id 3093685a228e5e614c861abb7f113a74de447a1de109e11e13f4f605f9e52d87 Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.783093 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.807523 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.845570 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.853653 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.860693 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq"] Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.994455 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:53 crc kubenswrapper[4879]: I1125 14:42:53.994612 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:53 crc kubenswrapper[4879]: E1125 14:42:53.994803 4879 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 25 14:42:53 crc kubenswrapper[4879]: E1125 14:42:53.994876 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:55.994857825 +0000 UTC m=+1067.598270896 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "webhook-server-cert" not found Nov 25 14:42:53 crc kubenswrapper[4879]: E1125 14:42:53.994898 4879 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 25 14:42:53 crc kubenswrapper[4879]: E1125 14:42:53.994974 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs podName:b7d6b37e-0aff-4496-b240-7770d1d23827 nodeName:}" failed. No retries permitted until 2025-11-25 14:42:55.994953578 +0000 UTC m=+1067.598366639 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs") pod "openstack-operator-controller-manager-77bf44fb75-xdhlf" (UID: "b7d6b37e-0aff-4496-b240-7770d1d23827") : secret "metrics-server-cert" not found Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.024326 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-864885998-vnr67"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.032734 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9"] Nov 25 14:42:54 crc kubenswrapper[4879]: W1125 14:42:54.033356 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06ee2ae7_d534_4170_9862_53a2580c39ce.slice/crio-36cc35948ce37a361c1c5231e5123099e64877405ddc407a3ca0d9f6c84b9d7a WatchSource:0}: Error finding container 36cc35948ce37a361c1c5231e5123099e64877405ddc407a3ca0d9f6c84b9d7a: Status 404 returned error can't find the container with id 36cc35948ce37a361c1c5231e5123099e64877405ddc407a3ca0d9f6c84b9d7a Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.056907 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.076270 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.084178 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr"] Nov 25 14:42:54 crc kubenswrapper[4879]: W1125 14:42:54.090183 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbac13c1a_af96_4cb0_a802_ef2086f9f06b.slice/crio-aecdf2dadfb2c742f47a6c6da16421be21fa98f6068ca251c726a14f1ae4a9af WatchSource:0}: Error finding container aecdf2dadfb2c742f47a6c6da16421be21fa98f6068ca251c726a14f1ae4a9af: Status 404 returned error can't find the container with id aecdf2dadfb2c742f47a6c6da16421be21fa98f6068ca251c726a14f1ae4a9af Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.090527 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.100289 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw"] Nov 25 14:42:54 crc kubenswrapper[4879]: W1125 14:42:54.104037 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee61acb4_f03b_4e5c_996c_3b4436b8e676.slice/crio-04e057302c2595a5578749ea58723ef38769dfb4d898c9b1545b9c8ed1ff5fa8 WatchSource:0}: Error finding container 04e057302c2595a5578749ea58723ef38769dfb4d898c9b1545b9c8ed1ff5fa8: Status 404 returned error can't find the container with id 04e057302c2595a5578749ea58723ef38769dfb4d898c9b1545b9c8ed1ff5fa8 Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.109097 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd"] Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.112549 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d7rmn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.112647 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ctnrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.114283 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bv4lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.114361 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.115015 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k5qmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.115102 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d7rmn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.116197 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.117347 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k5qmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.117356 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bv4lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.117486 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr8rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.119210 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.119207 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.122216 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr8rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.123364 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.238217 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-52qvx"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.310940 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94"] Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.322819 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mnsnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-567f98c9d-zvb94_openstack-operators(2037b3b9-3099-4f88-8e56-ec28ee25efa5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.338907 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mnsnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-567f98c9d-zvb94_openstack-operators(2037b3b9-3099-4f88-8e56-ec28ee25efa5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.341812 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.382584 4879 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerStarted","Data":"04e057302c2595a5578749ea58723ef38769dfb4d898c9b1545b9c8ed1ff5fa8"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.395186 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.410414 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerStarted","Data":"9eba43a076a36d73c09c7e6c563371094ec0a7162fbef29f451bb8f49073baec"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.450359 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerStarted","Data":"5a7ead8214a716dbf3ee0965014b4f96c29e2409233ad1fef99650dbacb1a779"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.459875 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerStarted","Data":"89b5f6fa8187132430c0763db5a788270f7dad86c902c9a0c15c1325c651ad4c"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.460153 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf"] Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.461200 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerStarted","Data":"aecdf2dadfb2c742f47a6c6da16421be21fa98f6068ca251c726a14f1ae4a9af"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.467223 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.468017 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerStarted","Data":"ed2f887db839f132e1c8cd5b310ebc2f225e0162c863af44e79d1e909fd16391"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.485615 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" 
event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerStarted","Data":"80d936d6f277551cd0828c9f3d028d4be0b5a351a4d5b70cc7719bab88c5bc33"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.487350 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerStarted","Data":"12e6221919aa8a1a4262a2f24dfe012749987a1ca6172045ccf8da7b99d93004"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.490684 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.494898 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerStarted","Data":"36cc35948ce37a361c1c5231e5123099e64877405ddc407a3ca0d9f6c84b9d7a"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.503304 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerStarted","Data":"762dac29734c5d2a00406f87e77d3fdd59048546ddd44a0fbb7fb7a063d4004a"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.505594 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.505906 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerStarted","Data":"f2baa9bb4cbfa0649ec30f51b4f8cf787a435046c95c72504d75bec0ce91d85b"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.506725 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerStarted","Data":"c85f6a2f84c82cb390912d89e992296b061a7c7a9ec92aa6051217c678a27e61"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.511573 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerStarted","Data":"02f57e18dd9dacced38bf99ac6d99fe02649d71057db48b457c0fa95b9b0140d"} Nov 25 14:42:54 crc 
kubenswrapper[4879]: I1125 14:42:54.513324 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerStarted","Data":"d53a232f9ee91d6a16db430e9cf53885e6480e7d80244733c65d2727201f238d"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.514588 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerStarted","Data":"15a12860ffa7388aebb76874757663db9dde7a604c655e1f3d14f8e6e294757f"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.529888 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.530377 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerStarted","Data":"8493478e66ad14e011215f0dc8f18c470dff3c3a86e9d379cb37cb41e2ffd207"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.531330 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerStarted","Data":"3093685a228e5e614c861abb7f113a74de447a1de109e11e13f4f605f9e52d87"} Nov 25 14:42:54 crc kubenswrapper[4879]: I1125 14:42:54.533224 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" event={"ID":"9d5bb254-3519-4805-bbfa-c4fad026bec1","Type":"ContainerStarted","Data":"34eece0576dd1760bdff8100b3542a55ce051e5e4b549e78b25a08b88dd50dc5"} Nov 25 14:42:54 crc kubenswrapper[4879]: E1125 14:42:54.533680 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:42:55 crc kubenswrapper[4879]: I1125 14:42:55.543432 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" event={"ID":"2e38fffb-d3ee-488d-bbc7-811d4ba43797","Type":"ContainerStarted","Data":"5740c836a2268646587e1df390289facbb521561d58ad587dfd7173d9e72d7c0"} Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.546706 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.546854 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.547465 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.547707 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.547893 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:42:55 crc kubenswrapper[4879]: E1125 14:42:55.548540 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:42:56 crc 
kubenswrapper[4879]: I1125 14:42:56.035366 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:56 crc kubenswrapper[4879]: I1125 14:42:56.035457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:56 crc kubenswrapper[4879]: I1125 14:42:56.041409 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-metrics-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:56 crc kubenswrapper[4879]: I1125 14:42:56.057065 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b7d6b37e-0aff-4496-b240-7770d1d23827-webhook-certs\") pod \"openstack-operator-controller-manager-77bf44fb75-xdhlf\" (UID: \"b7d6b37e-0aff-4496-b240-7770d1d23827\") " pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:42:56 crc kubenswrapper[4879]: I1125 14:42:56.251986 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:43:08 crc kubenswrapper[4879]: E1125 14:43:08.584199 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:3ef72bbd7cce89ff54d850ff44ca6d7b2360834a502da3d561aeb6fd3d9af50a" Nov 25 14:43:08 crc kubenswrapper[4879]: E1125 14:43:08.584903 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:3ef72bbd7cce89ff54d850ff44ca6d7b2360834a502da3d561aeb6fd3d9af50a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtgzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-748dc6576f-5rjs9_openstack-operators(89a61837-ab76-494d-a98d-268fed9bbe35): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:14 crc kubenswrapper[4879]: E1125 14:43:14.874143 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:442c269d79163f8da75505019c02e9f0815837aaadcaddacb8e6c12df297ca13" Nov 25 14:43:14 crc kubenswrapper[4879]: E1125 14:43:14.874913 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:442c269d79163f8da75505019c02e9f0815837aaadcaddacb8e6c12df297ca13,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bjntg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-fd75fd47d-txbsq_openstack-operators(5c199b9d-786f-4520-a7bb-67f616b16b88): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:19 crc kubenswrapper[4879]: E1125 14:43:19.163157 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894" Nov 25 14:43:19 crc kubenswrapper[4879]: E1125 14:43:19.163959 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} 
{} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-44r6s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-d5cc86f4b-8tdb9_openstack-operators(afbf9f55-3316-40bb-b53b-d4d96482f9d5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:35 crc kubenswrapper[4879]: E1125 14:43:35.649020 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:78852f8ba332a5756c1551c126157f735279101a0fc3277ba4aa4db3478789dd" Nov 25 14:43:35 crc kubenswrapper[4879]: E1125 14:43:35.651069 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:78852f8ba332a5756c1551c126157f735279101a0fc3277ba4aa4db3478789dd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-58nn7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf_openstack-operators(2e38fffb-d3ee-488d-bbc7-811d4ba43797): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:37 crc kubenswrapper[4879]: E1125 14:43:37.133657 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b" Nov 25 14:43:37 crc kubenswrapper[4879]: E1125 14:43:37.134151 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k5qmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:37 crc kubenswrapper[4879]: E1125 14:43:37.648213 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a" Nov 25 14:43:37 crc kubenswrapper[4879]: E1125 14:43:37.648382 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:b749a5dd8bc718875c3f5e81b38d54d003be77ab92de4a3e9f9595566496a58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d7rmn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:38 crc kubenswrapper[4879]: E1125 14:43:38.269148 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04" Nov 25 14:43:38 crc kubenswrapper[4879]: E1125 14:43:38.269707 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr8rv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:39 crc kubenswrapper[4879]: E1125 14:43:39.807571 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c" Nov 25 14:43:39 crc kubenswrapper[4879]: E1125 14:43:39.807780 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bv4lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:42 crc kubenswrapper[4879]: E1125 14:43:42.245775 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f" Nov 25 14:43:42 crc kubenswrapper[4879]: E1125 14:43:42.246041 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tb628,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:42 crc kubenswrapper[4879]: E1125 14:43:42.544215 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f" Nov 25 14:43:42 crc kubenswrapper[4879]: E1125 14:43:42.544434 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mnsnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-567f98c9d-zvb94_openstack-operators(2037b3b9-3099-4f88-8e56-ec28ee25efa5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:43 crc kubenswrapper[4879]: E1125 14:43:43.077087 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:43:43 crc kubenswrapper[4879]: E1125 14:43:43.077644 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtgzm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-748dc6576f-5rjs9_openstack-operators(89a61837-ab76-494d-a98d-268fed9bbe35): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Nov 25 14:43:43 crc kubenswrapper[4879]: E1125 14:43:43.078891 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" 
podUID="89a61837-ab76-494d-a98d-268fed9bbe35" Nov 25 14:43:43 crc kubenswrapper[4879]: I1125 14:43:43.339905 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf"] Nov 25 14:43:44 crc kubenswrapper[4879]: E1125 14:43:44.065311 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Nov 25 14:43:44 crc kubenswrapper[4879]: E1125 14:43:44.066038 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ctnrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:44 crc kubenswrapper[4879]: E1125 14:43:44.067309 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:43:44 crc kubenswrapper[4879]: I1125 14:43:44.902403 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" event={"ID":"b7d6b37e-0aff-4496-b240-7770d1d23827","Type":"ContainerStarted","Data":"5b29332e6761e2f1d3aefc5db0c9aa3ff4f12cc26c97c23c97e44d7754042209"} Nov 25 14:43:46 crc 
kubenswrapper[4879]: E1125 14:43:46.447368 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rdfnw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 25 14:43:46 crc kubenswrapper[4879]: E1125 14:43:46.448891 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.915777 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerStarted","Data":"d7650819c2f2f67dcf84527f9e5a4b89c132fef6e147dcc2fe04b3414b4a8f8a"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.917257 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerStarted","Data":"401f57fd2107333250f571a6331447e0a331329da55c67f02d5e2817483bf2a5"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.918471 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerStarted","Data":"59dfc0b1de9583c9b896942444207c4ab9e0e1b47ec26aa98f1ff61547fdfe37"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.919726 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerStarted","Data":"e11b3cc00fbcbf30f8ef921eff3eb4c6b508be12f95360187f09457b5a39a60a"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.920937 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerStarted","Data":"61cb4c2adb1ddf2c09a4c875d136aee7431d83f27a1baee40e1375d5e47f658b"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.921579 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:43:46 crc kubenswrapper[4879]: E1125 14:43:46.922771 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.923499 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerStarted","Data":"6f796f780c99704b8944b1370e22ce207ec3fb2fd5dc0eeefc613feaea57e477"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.926079 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" event={"ID":"9d5bb254-3519-4805-bbfa-c4fad026bec1","Type":"ContainerStarted","Data":"5cb04c89c2f97f2f7504da2289fed6cedb609fa08a9639505d9674234da492c0"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.927543 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerStarted","Data":"2dc0817f3db5d45a26bafb93b0a9a549a0337190229d0aeb6a8ace1dc12bc769"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.929290 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" event={"ID":"b7d6b37e-0aff-4496-b240-7770d1d23827","Type":"ContainerStarted","Data":"6fe1066e6eaee9b2ac399fafd519cbabb8e8df7b52f5b54a3a0d61f803e71861"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.929413 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.932070 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerStarted","Data":"55b6a97a438917b8e123bbd1a64cb3e372526f39a06fc954aa7ea5075755dedb"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.934017 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerStarted","Data":"159d936594bd06ac88ccc353452a14c27042af95c9a9f3daeecc4055b01156e5"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 14:43:46.935620 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerStarted","Data":"03209e6d6dda01372cc565118ed0a6d94ff308b582a0a97b12f19fb9f7c90944"} Nov 25 14:43:46 crc kubenswrapper[4879]: I1125 
14:43:46.966781 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" podStartSLOduration=54.966755388 podStartE2EDuration="54.966755388s" podCreationTimestamp="2025-11-25 14:42:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:43:46.963326793 +0000 UTC m=+1118.566739874" watchObservedRunningTime="2025-11-25 14:43:46.966755388 +0000 UTC m=+1118.570168459" Nov 25 14:43:47 crc kubenswrapper[4879]: E1125 14:43:47.943177 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:43:51 crc kubenswrapper[4879]: I1125 14:43:51.753727 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:43:51 crc kubenswrapper[4879]: E1125 14:43:51.755858 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:43:56 crc kubenswrapper[4879]: I1125 14:43:56.259302 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:43:57 crc kubenswrapper[4879]: E1125 14:43:57.646092 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:43:58 crc kubenswrapper[4879]: E1125 14:43:58.646704 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:43:58 crc kubenswrapper[4879]: E1125 14:43:58.646920 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d7rmn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:43:58 crc kubenswrapper[4879]: E1125 14:43:58.649502 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:44:12 crc kubenswrapper[4879]: E1125 14:44:12.860279 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:44:12 crc kubenswrapper[4879]: E1125 14:44:12.861779 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bv4lq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:44:12 crc kubenswrapper[4879]: E1125 14:44:12.863062 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.279876 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.280055 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.280361 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-428kr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6fdc4fcf86-pf28x_openstack-operators(81a1e752-3477-4e08-b151-874b0e503a1b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.280472 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m 
DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mw2q8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cb74df96-52qvx_openstack-operators(9d5bb254-3519-4805-bbfa-c4fad026bec1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.281599 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" podUID="81a1e752-3477-4e08-b151-874b0e503a1b" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.281615 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" podUID="9d5bb254-3519-4805-bbfa-c4fad026bec1" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.287047 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.287493 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-29cv9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:44:13 crc kubenswrapper[4879]: E1125 14:44:13.289000 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:44:14 crc kubenswrapper[4879]: I1125 14:44:14.102044 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:44:14 crc kubenswrapper[4879]: I1125 14:44:14.105775 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:44:19 crc kubenswrapper[4879]: E1125 14:44:19.171344 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Nov 25 14:44:19 crc kubenswrapper[4879]: E1125 14:44:19.171839 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5vztd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:44:19 crc kubenswrapper[4879]: E1125 14:44:19.173432 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:44:20 crc kubenswrapper[4879]: I1125 14:44:20.139226 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:44:20 crc kubenswrapper[4879]: I1125 14:44:20.142965 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:44:22 crc kubenswrapper[4879]: I1125 14:44:22.371979 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:44:22 crc kubenswrapper[4879]: I1125 14:44:22.374222 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:44:22 crc kubenswrapper[4879]: I1125 14:44:22.539733 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:44:22 crc kubenswrapper[4879]: I1125 14:44:22.541669 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:44:24 crc kubenswrapper[4879]: I1125 14:44:24.166296 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerStarted","Data":"d87c9da664ca53bc78a4de89e608f6363492d995b9dd8726e26a49f565cd796a"} Nov 25 14:44:24 crc kubenswrapper[4879]: I1125 14:44:24.168001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerStarted","Data":"21dfd994c7bee03cea641bc2ebf259e714d2baaa6a48d82b33db7170948aa834"} Nov 25 14:44:24 crc kubenswrapper[4879]: I1125 14:44:24.169286 4879 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerStarted","Data":"0b9bd2c2272b98f2159dae8ca1c2bf74fdbdec97908ea08773e05724c87145ea"} Nov 25 14:44:24 crc kubenswrapper[4879]: E1125 14:44:24.217116 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:44:24 crc kubenswrapper[4879]: E1125 14:44:24.901919 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.179987 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerStarted","Data":"ce92ca9f5249a3e44899ded7fca5784cb203d863c7e0f1d99068f78d4aa3ba8f"} Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.181624 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerStarted","Data":"ee87f6e5093309eb33265891cc68f9d6f9d2c32b06180e3bebb826343def1c36"} Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.182785 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.185474 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerStarted","Data":"476ee0a62977dd74db409a7048b67089d0aa224337e4aeeb68f1c3980b62bc55"} Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.185893 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.188073 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerStarted","Data":"fdee0b7f2384154045094a393bf81099b82cd1e0a2d6e811335f41da036738b9"} Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 14:44:25.204289 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podStartSLOduration=19.679727647 podStartE2EDuration="1m34.204272518s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.765658333 +0000 UTC m=+1065.369071404" lastFinishedPulling="2025-11-25 14:44:08.290203204 +0000 UTC m=+1139.893616275" observedRunningTime="2025-11-25 14:44:25.197806279 +0000 UTC m=+1156.801219370" watchObservedRunningTime="2025-11-25 14:44:25.204272518 +0000 UTC m=+1156.807685599" Nov 25 14:44:25 crc kubenswrapper[4879]: I1125 
14:44:25.271647 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podStartSLOduration=78.058455341 podStartE2EDuration="1m34.271627321s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:52.508367971 +0000 UTC m=+1064.111781032" lastFinishedPulling="2025-11-25 14:43:08.721539941 +0000 UTC m=+1080.324953012" observedRunningTime="2025-11-25 14:44:25.267466416 +0000 UTC m=+1156.870879507" watchObservedRunningTime="2025-11-25 14:44:25.271627321 +0000 UTC m=+1156.875040392" Nov 25 14:44:26 crc kubenswrapper[4879]: I1125 14:44:26.194650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" event={"ID":"2e38fffb-d3ee-488d-bbc7-811d4ba43797","Type":"ContainerStarted","Data":"4eb12b7a8037d35c3784edba680d0aa49f3a56586d837cc63446ae1052a31a84"} Nov 25 14:44:26 crc kubenswrapper[4879]: I1125 14:44:26.207657 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" podStartSLOduration=20.578421121 podStartE2EDuration="1m35.207639832s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.850678387 +0000 UTC m=+1065.454091458" lastFinishedPulling="2025-11-25 14:44:08.479897098 +0000 UTC m=+1140.083310169" observedRunningTime="2025-11-25 14:44:26.205690628 +0000 UTC m=+1157.809103709" watchObservedRunningTime="2025-11-25 14:44:26.207639832 +0000 UTC m=+1157.811052903" Nov 25 14:44:26 crc kubenswrapper[4879]: I1125 14:44:26.227865 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" podStartSLOduration=20.59997615 podStartE2EDuration="1m35.227841533s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.856623092 +0000 UTC m=+1065.460036163" lastFinishedPulling="2025-11-25 14:44:08.484488475 +0000 UTC m=+1140.087901546" observedRunningTime="2025-11-25 14:44:26.225895489 +0000 UTC m=+1157.829308580" watchObservedRunningTime="2025-11-25 14:44:26.227841533 +0000 UTC m=+1157.831254624" Nov 25 14:44:26 crc kubenswrapper[4879]: E1125 14:44:26.251564 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" podUID="2e38fffb-d3ee-488d-bbc7-811d4ba43797" Nov 25 14:44:26 crc kubenswrapper[4879]: E1125 14:44:26.596347 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:44:26 crc kubenswrapper[4879]: E1125 14:44:26.617418 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:44:26 crc kubenswrapper[4879]: E1125 14:44:26.617649 4879 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" podUID="5c199b9d-786f-4520-a7bb-67f616b16b88" Nov 25 14:44:26 crc kubenswrapper[4879]: E1125 14:44:26.617776 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" podUID="afbf9f55-3316-40bb-b53b-d4d96482f9d5" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.203557 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerStarted","Data":"341705100c60be0c2c432a53bd618f95264b402b12774bbf9e32bd5cda67aa46"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.205918 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerStarted","Data":"a1801487959f078315e26a600490fe1b400020a5c53cc822debaa6fcb3c05b92"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.206051 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.207336 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerStarted","Data":"b68868d992497f15d66b4392e4402c457509eb6aa27993ea13fa11e819ba99fc"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.208597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerStarted","Data":"3d2fd5bf236504525ae44ac848f2bd0faf803b8cb84c372c3b3bb721fc9b52cb"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.209773 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerStarted","Data":"addf4e923d5c385b251d312023f6c77757e564cbf2e0462dec54c754690ecfc8"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.211701 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerStarted","Data":"e30274eab44fb23bdedebdaecff818c707b95981156598e6d56968bde5ec0f7c"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.214509 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerStarted","Data":"e73373b97683397bb86692dff5ce2e0ec9c10b6adac86697191cd17410fea7a9"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.214845 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:44:27 crc 
kubenswrapper[4879]: I1125 14:44:27.217826 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.219443 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerStarted","Data":"f3f7cd30d4a77fe26456a97ce5126f37468147a22309cae52966e66608940544"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.220053 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.222792 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerStarted","Data":"a7d2ed2a4104973561203cf739fa8765cc49a88494f60aa84cd4f12574903ac9"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.223233 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.225772 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" event={"ID":"9d5bb254-3519-4805-bbfa-c4fad026bec1","Type":"ContainerStarted","Data":"90f647186db4c71db87cdbfbc7247732aa8df499886b9370a527c6a146cbb48e"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.229418 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerStarted","Data":"9a2bff52cc8232a67a6ea699bbdc9afb0c468cba92c36690c049d09130c63c8f"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.229626 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podStartSLOduration=50.952352206 podStartE2EDuration="1m36.229605772s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.739148816 +0000 UTC m=+1065.342561887" lastFinishedPulling="2025-11-25 14:43:39.016402382 +0000 UTC m=+1110.619815453" observedRunningTime="2025-11-25 14:44:27.224718265 +0000 UTC m=+1158.828131336" watchObservedRunningTime="2025-11-25 14:44:27.229605772 +0000 UTC m=+1158.833018843" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.234907 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerStarted","Data":"66c38d4ab031a27670cd57bf800d8243edb72a5255a65039792a6f67aba3dbb7"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.234946 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerStarted","Data":"8990e3b8a000b763557590fc073a778b9f0a2151f2cacf3f7c1cee8295a55769"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.235057 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.237044 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerStarted","Data":"70e11940910f632237d4a6818a226aefd1732e3f80c577a64b3911ca74dd6db2"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.239468 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerStarted","Data":"eef8016018b8e57c01cca81fba50c544a66b8d1093c2994ac5e9f2f96eedbbbd"} Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.246766 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podStartSLOduration=4.847452925 podStartE2EDuration="1m35.246749208s" podCreationTimestamp="2025-11-25 14:42:52 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.112458644 +0000 UTC m=+1065.715871715" lastFinishedPulling="2025-11-25 14:44:24.511754927 +0000 UTC m=+1156.115167998" observedRunningTime="2025-11-25 14:44:27.238707604 +0000 UTC m=+1158.842120675" watchObservedRunningTime="2025-11-25 14:44:27.246749208 +0000 UTC m=+1158.850162279" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.270785 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" podStartSLOduration=21.607978552 podStartE2EDuration="1m36.270765456s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.816366573 +0000 UTC m=+1065.419779644" lastFinishedPulling="2025-11-25 14:44:08.479153467 +0000 UTC m=+1140.082566548" observedRunningTime="2025-11-25 14:44:27.267084054 +0000 UTC m=+1158.870497135" watchObservedRunningTime="2025-11-25 14:44:27.270765456 +0000 UTC m=+1158.874178537" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.294900 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podStartSLOduration=78.075575248 podStartE2EDuration="1m36.294878007s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.011898529 +0000 UTC m=+1064.615311600" lastFinishedPulling="2025-11-25 14:43:11.231201288 +0000 UTC m=+1082.834614359" observedRunningTime="2025-11-25 14:44:27.283319625 +0000 UTC m=+1158.886732706" watchObservedRunningTime="2025-11-25 14:44:27.294878007 +0000 UTC m=+1158.898291078" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.348756 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podStartSLOduration=21.652582792 podStartE2EDuration="1m36.348731473s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.783466699 +0000 UTC m=+1065.386879770" lastFinishedPulling="2025-11-25 14:44:08.47961538 +0000 UTC m=+1140.083028451" observedRunningTime="2025-11-25 14:44:27.346556172 +0000 UTC m=+1158.949969243" watchObservedRunningTime="2025-11-25 14:44:27.348731473 +0000 UTC m=+1158.952144544" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.393657 4879 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" podStartSLOduration=10.160197338 podStartE2EDuration="1m36.393638252s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.854106492 +0000 UTC m=+1065.457519563" lastFinishedPulling="2025-11-25 14:44:20.087547406 +0000 UTC m=+1151.690960477" observedRunningTime="2025-11-25 14:44:27.390866565 +0000 UTC m=+1158.994279636" watchObservedRunningTime="2025-11-25 14:44:27.393638252 +0000 UTC m=+1158.997051323" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.491395 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podStartSLOduration=8.955283412 podStartE2EDuration="1m36.491374169s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:52.551603253 +0000 UTC m=+1064.155016324" lastFinishedPulling="2025-11-25 14:44:20.08769401 +0000 UTC m=+1151.691107081" observedRunningTime="2025-11-25 14:44:27.489348312 +0000 UTC m=+1159.092761403" watchObservedRunningTime="2025-11-25 14:44:27.491374169 +0000 UTC m=+1159.094787240" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.495814 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" podStartSLOduration=52.346881354 podStartE2EDuration="1m36.495790162s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.112209107 +0000 UTC m=+1065.715622178" lastFinishedPulling="2025-11-25 14:43:38.261117915 +0000 UTC m=+1109.864530986" observedRunningTime="2025-11-25 14:44:27.466170338 +0000 UTC m=+1159.069583439" watchObservedRunningTime="2025-11-25 14:44:27.495790162 +0000 UTC m=+1159.099203233" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.509889 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" podStartSLOduration=52.494356814 podStartE2EDuration="1m36.509870373s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.245561975 +0000 UTC m=+1065.848975046" lastFinishedPulling="2025-11-25 14:43:38.261075544 +0000 UTC m=+1109.864488605" observedRunningTime="2025-11-25 14:44:27.50725074 +0000 UTC m=+1159.110663811" watchObservedRunningTime="2025-11-25 14:44:27.509870373 +0000 UTC m=+1159.113283444" Nov 25 14:44:27 crc kubenswrapper[4879]: I1125 14:44:27.528698 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podStartSLOduration=6.397286352 podStartE2EDuration="1m36.528677446s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.112359002 +0000 UTC m=+1065.715772073" lastFinishedPulling="2025-11-25 14:44:24.243750096 +0000 UTC m=+1155.847163167" observedRunningTime="2025-11-25 14:44:27.527440922 +0000 UTC m=+1159.130853993" watchObservedRunningTime="2025-11-25 14:44:27.528677446 +0000 UTC m=+1159.132090517" Nov 25 14:44:28 crc kubenswrapper[4879]: I1125 14:44:28.247338 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:44:28 crc kubenswrapper[4879]: I1125 14:44:28.250052 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:44:28 crc kubenswrapper[4879]: I1125 14:44:28.250441 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.265580 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerStarted","Data":"b71ac1a211654bdd6256dd61660e120d3c92ee883e0a27264e763738b1919885"} Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.266227 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.268925 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerStarted","Data":"aaca102ee4b0c5bf46a07be9bf9983ea4005ed21f9daeeb9be7ed60867ba72f1"} Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.269361 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.289618 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podStartSLOduration=4.240554224 podStartE2EDuration="1m38.289592118s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.035726321 +0000 UTC m=+1065.639139392" lastFinishedPulling="2025-11-25 14:44:28.084764215 +0000 UTC m=+1159.688177286" observedRunningTime="2025-11-25 14:44:29.284241989 +0000 UTC m=+1160.887655060" watchObservedRunningTime="2025-11-25 14:44:29.289592118 +0000 UTC m=+1160.893005189" Nov 25 14:44:29 crc kubenswrapper[4879]: I1125 14:44:29.300661 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podStartSLOduration=4.539500735 podStartE2EDuration="1m38.300639816s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.322284857 +0000 UTC m=+1065.925697938" lastFinishedPulling="2025-11-25 14:44:28.083423948 +0000 UTC m=+1159.686837019" observedRunningTime="2025-11-25 14:44:29.298651191 +0000 UTC m=+1160.902064262" watchObservedRunningTime="2025-11-25 14:44:29.300639816 +0000 UTC m=+1160.904052897" Nov 25 14:44:31 crc kubenswrapper[4879]: I1125 14:44:31.799678 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:44:31 crc kubenswrapper[4879]: I1125 14:44:31.801940 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:44:31 crc kubenswrapper[4879]: I1125 14:44:31.996158 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.131563 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.133816 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.292356 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerStarted","Data":"577958969f7a55151840529f749b47fff22fdc701d92f55927a371f0edc117b6"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.292963 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerStarted","Data":"a2f04fdcd428e81c5266cd6addfd05d0e2f215ed59ee47fdb5c11415427aa9e3"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.293587 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerStarted","Data":"c4ee00e63a67ef390923f7545c3c2252a9280c145298e88892162d045a5adf0e"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.293894 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.296025 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerStarted","Data":"5916db203b7bf14bbabf8bbda5a6a4f6b13a70d932f3391ed81231243ddf68bd"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.296172 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.298379 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerStarted","Data":"5da5f3449d43923103b1d58e899281bdafddc60c7407c382787dbdba33070b28"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.298408 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.299617 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerStarted","Data":"c61d69d1b9bb698e8ca414526867d5ba8969c43de2b2211136f35a4d65962982"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.300086 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.301592 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" 
event={"ID":"2e38fffb-d3ee-488d-bbc7-811d4ba43797","Type":"ContainerStarted","Data":"93267b192680033f46d35a8557b35b40337297941abdd3c618311a1e87e38090"} Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.302047 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.314478 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podStartSLOduration=3.797979082 podStartE2EDuration="1m41.314455739s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.11409752 +0000 UTC m=+1065.717510601" lastFinishedPulling="2025-11-25 14:44:31.630574187 +0000 UTC m=+1163.233987258" observedRunningTime="2025-11-25 14:44:32.308335679 +0000 UTC m=+1163.911748750" watchObservedRunningTime="2025-11-25 14:44:32.314455739 +0000 UTC m=+1163.917868810" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.339683 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" podStartSLOduration=4.431118123 podStartE2EDuration="1m41.339636739s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.077945185 +0000 UTC m=+1065.681358256" lastFinishedPulling="2025-11-25 14:44:30.986463801 +0000 UTC m=+1162.589876872" observedRunningTime="2025-11-25 14:44:32.334011293 +0000 UTC m=+1163.937424364" watchObservedRunningTime="2025-11-25 14:44:32.339636739 +0000 UTC m=+1163.943049810" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.346413 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.371647 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" podStartSLOduration=4.852828306 podStartE2EDuration="1m41.371626738s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.46806548 +0000 UTC m=+1066.071478551" lastFinishedPulling="2025-11-25 14:44:30.986863912 +0000 UTC m=+1162.590276983" observedRunningTime="2025-11-25 14:44:32.36557987 +0000 UTC m=+1163.968992971" watchObservedRunningTime="2025-11-25 14:44:32.371626738 +0000 UTC m=+1163.975039799" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.385784 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" podStartSLOduration=4.266123236 podStartE2EDuration="1m41.385764942s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:53.868139002 +0000 UTC m=+1065.471552073" lastFinishedPulling="2025-11-25 14:44:30.987780708 +0000 UTC m=+1162.591193779" observedRunningTime="2025-11-25 14:44:32.38210268 +0000 UTC m=+1163.985515751" watchObservedRunningTime="2025-11-25 14:44:32.385764942 +0000 UTC m=+1163.989178013" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.402909 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podStartSLOduration=4.5316897879999996 podStartE2EDuration="1m41.402889818s" 
podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.114910742 +0000 UTC m=+1065.718323813" lastFinishedPulling="2025-11-25 14:44:30.986110772 +0000 UTC m=+1162.589523843" observedRunningTime="2025-11-25 14:44:32.397818576 +0000 UTC m=+1164.001231647" watchObservedRunningTime="2025-11-25 14:44:32.402889818 +0000 UTC m=+1164.006302889" Nov 25 14:44:32 crc kubenswrapper[4879]: I1125 14:44:32.416371 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podStartSLOduration=4.541816139 podStartE2EDuration="1m41.416356431s" podCreationTimestamp="2025-11-25 14:42:51 +0000 UTC" firstStartedPulling="2025-11-25 14:42:54.117380521 +0000 UTC m=+1065.720793592" lastFinishedPulling="2025-11-25 14:44:30.991920803 +0000 UTC m=+1162.595333884" observedRunningTime="2025-11-25 14:44:32.411474476 +0000 UTC m=+1164.014887547" watchObservedRunningTime="2025-11-25 14:44:32.416356431 +0000 UTC m=+1164.019769502" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.016075 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.154981 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.264078 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.349006 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.390701 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.487579 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:44:42 crc kubenswrapper[4879]: I1125 14:44:42.587597 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:44:43 crc kubenswrapper[4879]: I1125 14:44:43.788279 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:44:48 crc kubenswrapper[4879]: I1125 14:44:47.409370 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:44:48 crc kubenswrapper[4879]: I1125 14:44:47.410081 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:45:00 crc 
kubenswrapper[4879]: I1125 14:45:00.140914 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc"] Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.142879 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.146330 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.146519 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.160025 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc"] Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.327180 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.327429 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qx9h\" (UniqueName: \"kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.327497 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.428512 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qx9h\" (UniqueName: \"kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.428570 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.428628 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.429511 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.436903 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.451967 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qx9h\" (UniqueName: \"kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h\") pod \"collect-profiles-29401365-q5zlc\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.475902 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:00 crc kubenswrapper[4879]: I1125 14:45:00.908321 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc"] Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.348110 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.351929 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.353980 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-h8hjg" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.354012 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.354270 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.355100 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.358730 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.406240 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.407769 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.410514 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.421392 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.504301 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" event={"ID":"3fef8236-994c-4660-ad35-11071a8ca4e5","Type":"ContainerStarted","Data":"98a0f3dc1f916ff40b68a7266ce6a56c5388e28ca2e4cdb821143c4265e42505"} Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.542439 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.542496 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.542596 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.542621 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh67w\" (UniqueName: \"kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.542985 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m68f8\" (UniqueName: \"kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.644716 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m68f8\" (UniqueName: \"kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.644780 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc 
kubenswrapper[4879]: I1125 14:45:01.644801 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.644855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.644878 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh67w\" (UniqueName: \"kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.646041 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.646061 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.646041 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.663248 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m68f8\" (UniqueName: \"kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8\") pod \"dnsmasq-dns-78dd6ddcc-p5hts\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.663490 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh67w\" (UniqueName: \"kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w\") pod \"dnsmasq-dns-675f4bcbfc-q94xp\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.672460 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:01 crc kubenswrapper[4879]: I1125 14:45:01.722718 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:02 crc kubenswrapper[4879]: I1125 14:45:02.117352 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:02 crc kubenswrapper[4879]: W1125 14:45:02.119262 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod912c420d_b65e_4f4b_ae21_47055990a5aa.slice/crio-48af932948c65fce01fa531c62c55266d253a36e3d83a610515fe65d24039afd WatchSource:0}: Error finding container 48af932948c65fce01fa531c62c55266d253a36e3d83a610515fe65d24039afd: Status 404 returned error can't find the container with id 48af932948c65fce01fa531c62c55266d253a36e3d83a610515fe65d24039afd Nov 25 14:45:02 crc kubenswrapper[4879]: I1125 14:45:02.169800 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 25 14:45:02 crc kubenswrapper[4879]: W1125 14:45:02.173169 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95df700a_7290_4560_8765_0c4b1053aff4.slice/crio-51fe94c389810845a19cc345d05f217eea548d4fb5a201a096b3e22035946f59 WatchSource:0}: Error finding container 51fe94c389810845a19cc345d05f217eea548d4fb5a201a096b3e22035946f59: Status 404 returned error can't find the container with id 51fe94c389810845a19cc345d05f217eea548d4fb5a201a096b3e22035946f59 Nov 25 14:45:02 crc kubenswrapper[4879]: I1125 14:45:02.516758 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" event={"ID":"3fef8236-994c-4660-ad35-11071a8ca4e5","Type":"ContainerStarted","Data":"82b8d8f559936292b38d5d41780f507fbe3341fb01dac53b713d44502ac5aa73"} Nov 25 14:45:02 crc kubenswrapper[4879]: I1125 14:45:02.521730 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" event={"ID":"912c420d-b65e-4f4b-ae21-47055990a5aa","Type":"ContainerStarted","Data":"48af932948c65fce01fa531c62c55266d253a36e3d83a610515fe65d24039afd"} Nov 25 14:45:02 crc kubenswrapper[4879]: I1125 14:45:02.522970 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" event={"ID":"95df700a-7290-4560-8765-0c4b1053aff4","Type":"ContainerStarted","Data":"51fe94c389810845a19cc345d05f217eea548d4fb5a201a096b3e22035946f59"} Nov 25 14:45:03 crc kubenswrapper[4879]: I1125 14:45:03.534857 4879 generic.go:334] "Generic (PLEG): container finished" podID="3fef8236-994c-4660-ad35-11071a8ca4e5" containerID="82b8d8f559936292b38d5d41780f507fbe3341fb01dac53b713d44502ac5aa73" exitCode=0 Nov 25 14:45:03 crc kubenswrapper[4879]: I1125 14:45:03.534913 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" event={"ID":"3fef8236-994c-4660-ad35-11071a8ca4e5","Type":"ContainerDied","Data":"82b8d8f559936292b38d5d41780f507fbe3341fb01dac53b713d44502ac5aa73"} Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.272898 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.301034 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.304929 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.355570 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.393675 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.393751 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.393849 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r276d\" (UniqueName: \"kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.495104 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r276d\" (UniqueName: \"kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.495225 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.495278 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.496476 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.497630 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.527776 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r276d\" (UniqueName: 
\"kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d\") pod \"dnsmasq-dns-666b6646f7-gtnf9\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.606070 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.625866 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.626471 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.627649 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.639474 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.800887 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.801474 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.801500 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t48r\" (UniqueName: \"kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.904344 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.904415 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t48r\" (UniqueName: \"kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.904474 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.905736 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.905415 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:04 crc kubenswrapper[4879]: I1125 14:45:04.951949 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t48r\" (UniqueName: \"kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r\") pod \"dnsmasq-dns-57d769cc4f-l826d\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.025381 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.416429 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:05 crc kubenswrapper[4879]: W1125 14:45:05.447223 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0786b39_b3fc_4b83_98ef_aefee1014557.slice/crio-9cc521f2ca3ccc9e9fec745226fb2b3b121286271b3ae7c85b5805aa88ec8318 WatchSource:0}: Error finding container 9cc521f2ca3ccc9e9fec745226fb2b3b121286271b3ae7c85b5805aa88ec8318: Status 404 returned error can't find the container with id 9cc521f2ca3ccc9e9fec745226fb2b3b121286271b3ae7c85b5805aa88ec8318 Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.499318 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.501944 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.505430 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.505866 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.506217 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4wlql" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.506462 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.506740 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.506890 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.506992 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.508765 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.520201 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.572700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" event={"ID":"3fef8236-994c-4660-ad35-11071a8ca4e5","Type":"ContainerDied","Data":"98a0f3dc1f916ff40b68a7266ce6a56c5388e28ca2e4cdb821143c4265e42505"} Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.572745 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98a0f3dc1f916ff40b68a7266ce6a56c5388e28ca2e4cdb821143c4265e42505" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.572795 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.578206 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" event={"ID":"a0786b39-b3fc-4b83-98ef-aefee1014557","Type":"ContainerStarted","Data":"9cc521f2ca3ccc9e9fec745226fb2b3b121286271b3ae7c85b5805aa88ec8318"} Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.619988 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume\") pod \"3fef8236-994c-4660-ad35-11071a8ca4e5\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620092 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume\") pod \"3fef8236-994c-4660-ad35-11071a8ca4e5\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620390 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qx9h\" (UniqueName: \"kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h\") pod \"3fef8236-994c-4660-ad35-11071a8ca4e5\" (UID: \"3fef8236-994c-4660-ad35-11071a8ca4e5\") " Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620731 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620825 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620901 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 
25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620938 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.620994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621025 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621087 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621193 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621256 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.621392 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzsjr\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.623695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume" (OuterVolumeSpecName: "config-volume") pod "3fef8236-994c-4660-ad35-11071a8ca4e5" (UID: "3fef8236-994c-4660-ad35-11071a8ca4e5"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.625841 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3fef8236-994c-4660-ad35-11071a8ca4e5" (UID: "3fef8236-994c-4660-ad35-11071a8ca4e5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.631332 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h" (OuterVolumeSpecName: "kube-api-access-4qx9h") pod "3fef8236-994c-4660-ad35-11071a8ca4e5" (UID: "3fef8236-994c-4660-ad35-11071a8ca4e5"). InnerVolumeSpecName "kube-api-access-4qx9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722399 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722587 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722624 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722656 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzsjr\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722684 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722730 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.722757 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc 
kubenswrapper[4879]: I1125 14:45:05.723171 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723208 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723233 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723330 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qx9h\" (UniqueName: \"kubernetes.io/projected/3fef8236-994c-4660-ad35-11071a8ca4e5-kube-api-access-4qx9h\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723342 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3fef8236-994c-4660-ad35-11071a8ca4e5-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723352 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fef8236-994c-4660-ad35-11071a8ca4e5-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723394 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.723802 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.724064 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.724354 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.725531 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.725715 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.728596 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.735063 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.740330 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.744049 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.747115 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzsjr\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.763438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.807569 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:45:05 crc kubenswrapper[4879]: E1125 14:45:05.808465 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fef8236-994c-4660-ad35-11071a8ca4e5" containerName="collect-profiles" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.808482 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3fef8236-994c-4660-ad35-11071a8ca4e5" containerName="collect-profiles" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.808712 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fef8236-994c-4660-ad35-11071a8ca4e5" containerName="collect-profiles" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.809686 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.813858 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.814094 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.814430 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.814577 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.814978 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.815400 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.815464 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-5zhwv" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.829209 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.860587 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926395 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926473 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926542 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d82p4\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926582 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926614 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926692 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926715 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926743 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.926956 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:05 crc kubenswrapper[4879]: I1125 14:45:05.927012 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029348 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029407 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029428 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029447 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029471 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029513 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029532 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: 
I1125 14:45:06.029558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029583 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d82p4\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029604 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.029627 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.030073 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.031600 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.031785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.032620 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.032928 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.033138 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.039966 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.040406 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.043083 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.162453 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.168635 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d82p4\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.170502 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.295024 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.426953 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:45:06 crc kubenswrapper[4879]: W1125 14:45:06.434427 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba4e29f9_f929_4f80_ad4d_f6f1ed7e77dd.slice/crio-225dff5803586506635b3f1bc8c0282492f6e545c69a15f6d1e3cbb56e09f252 WatchSource:0}: Error finding container 225dff5803586506635b3f1bc8c0282492f6e545c69a15f6d1e3cbb56e09f252: Status 404 returned error can't find the container with id 225dff5803586506635b3f1bc8c0282492f6e545c69a15f6d1e3cbb56e09f252 Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.464183 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.609311 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerStarted","Data":"225dff5803586506635b3f1bc8c0282492f6e545c69a15f6d1e3cbb56e09f252"} Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.613180 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" event={"ID":"e78f883b-647e-4d51-ab69-891982a93b03","Type":"ContainerStarted","Data":"9443b3e49e79c117506bcc25af2b7f676d7538620d03eea04cf2dc0b6cd83211"} Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.901317 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.907081 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.910431 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.910511 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-fks6b" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.911749 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.913789 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.914256 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.917876 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:45:06 crc kubenswrapper[4879]: I1125 14:45:06.918826 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 25 14:45:06 crc kubenswrapper[4879]: W1125 14:45:06.959040 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1814b22_d1b3_4426_9fa2_f613640f63e8.slice/crio-7375a763183441cc5834f0025c23e8d37def2d89a69108d3818d2fe7ff7e8c84 WatchSource:0}: Error finding container 7375a763183441cc5834f0025c23e8d37def2d89a69108d3818d2fe7ff7e8c84: Status 404 returned error can't find the container with id 7375a763183441cc5834f0025c23e8d37def2d89a69108d3818d2fe7ff7e8c84 Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057595 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057650 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057667 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057691 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057721 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057740 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057766 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szfr8\" (UniqueName: \"kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.057800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.158841 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.158916 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.158936 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.158985 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159022 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159044 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159079 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szfr8\" (UniqueName: \"kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159145 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159492 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.159963 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.160753 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.161103 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.161342 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " 
pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.168259 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.168934 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.177973 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szfr8\" (UniqueName: \"kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.180429 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.279665 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.627644 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerStarted","Data":"7375a763183441cc5834f0025c23e8d37def2d89a69108d3818d2fe7ff7e8c84"} Nov 25 14:45:07 crc kubenswrapper[4879]: I1125 14:45:07.768225 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.347597 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.350249 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.352563 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.352975 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.353090 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-5x88k" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.353223 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.359360 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482413 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482498 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482602 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482632 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djccq\" (UniqueName: \"kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482661 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482714 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.482772 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.583927 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.583977 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584006 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djccq\" (UniqueName: \"kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584047 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584087 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584156 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584184 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.584224 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.585325 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.585439 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.585799 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.586909 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.589207 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.596982 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.608972 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.615437 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djccq\" (UniqueName: \"kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq\") pod \"openstack-cell1-galera-0\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.634393 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"a19706bc-9684-4f70-a0e8-9108014cac2f\") " pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.643147 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerStarted","Data":"6d935c42e3280de3e0e1d4bfa0905c43bbbc329996e9a3ab0e58aad6a700bb17"} Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.685238 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.728644 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.729528 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.732232 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7zcln" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.732462 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.732640 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.748554 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.786167 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm42q\" (UniqueName: \"kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.786398 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.786506 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.786612 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.786735 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.888256 4879 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.888582 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.888633 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.888664 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm42q\" (UniqueName: \"kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.888683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.889051 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.889639 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.894549 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.895042 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:08 crc kubenswrapper[4879]: I1125 14:45:08.926759 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm42q\" (UniqueName: \"kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q\") pod \"memcached-0\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " pod="openstack/memcached-0" Nov 25 14:45:09 crc kubenswrapper[4879]: I1125 14:45:09.058174 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.574183 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.575494 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.577645 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-qnmfv" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.591659 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.720225 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnv9c\" (UniqueName: \"kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c\") pod \"kube-state-metrics-0\" (UID: \"821c14ef-2ea4-488d-84f9-2234a6e27447\") " pod="openstack/kube-state-metrics-0" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.822292 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnv9c\" (UniqueName: \"kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c\") pod \"kube-state-metrics-0\" (UID: \"821c14ef-2ea4-488d-84f9-2234a6e27447\") " pod="openstack/kube-state-metrics-0" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.839263 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnv9c\" (UniqueName: \"kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c\") pod \"kube-state-metrics-0\" (UID: \"821c14ef-2ea4-488d-84f9-2234a6e27447\") " pod="openstack/kube-state-metrics-0" Nov 25 14:45:10 crc kubenswrapper[4879]: I1125 14:45:10.906663 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.321946 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.323245 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.325074 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.326807 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.327022 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-h5stt" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.333661 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.335359 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.338515 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.346370 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464384 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464489 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464511 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464531 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464547 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464573 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dt56\" (UniqueName: \"kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464597 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc 
kubenswrapper[4879]: I1125 14:45:13.464623 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464650 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464678 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464701 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.464722 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqbdb\" (UniqueName: \"kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.566571 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dt56\" (UniqueName: \"kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.566643 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.566677 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.566707 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568313 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568381 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568413 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqbdb\" (UniqueName: \"kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568548 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568665 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568705 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568759 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568793 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.568814 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569004 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " 
pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569157 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569177 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.567227 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569295 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.567378 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569465 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.569531 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.571003 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.574830 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.579032 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle\") pod 
\"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.585191 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqbdb\" (UniqueName: \"kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb\") pod \"ovn-controller-gml5w\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.585646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dt56\" (UniqueName: \"kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56\") pod \"ovn-controller-ovs-2n7ff\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.682312 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w" Nov 25 14:45:13 crc kubenswrapper[4879]: I1125 14:45:13.696342 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.780086 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.783038 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.787387 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-vxt5s" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.787634 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.787837 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.787427 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.788279 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.791313 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910025 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910102 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910151 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlphc\" (UniqueName: 
\"kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910169 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910189 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910214 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910364 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:15 crc kubenswrapper[4879]: I1125 14:45:15.910415 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.011944 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012039 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012072 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012104 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc 
kubenswrapper[4879]: I1125 14:45:16.012154 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlphc\" (UniqueName: \"kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012216 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.012242 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.014699 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.015393 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.016519 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.017158 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.019412 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.020696 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: 
\"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.021860 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.034872 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlphc\" (UniqueName: \"kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.050116 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:16 crc kubenswrapper[4879]: I1125 14:45:16.116616 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:17 crc kubenswrapper[4879]: I1125 14:45:17.409041 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:45:17 crc kubenswrapper[4879]: I1125 14:45:17.409108 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.448860 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.451865 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.457998 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.458283 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.458671 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9h8lp" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.463131 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.464439 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.562541 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.562606 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.562640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.562816 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.562974 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.563050 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t85hb\" (UniqueName: \"kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.563180 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.563342 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.664958 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665015 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t85hb\" (UniqueName: \"kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665101 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665216 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665244 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.665299 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.666102 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.666356 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.666697 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.667035 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.671794 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.681489 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.684424 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t85hb\" (UniqueName: \"kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.686132 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.694379 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:18 crc kubenswrapper[4879]: I1125 14:45:18.779767 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:28 crc kubenswrapper[4879]: I1125 14:45:28.542832 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 25 14:45:29 crc kubenswrapper[4879]: E1125 14:45:29.924023 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 25 14:45:29 crc kubenswrapper[4879]: E1125 14:45:29.924238 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hh67w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-q94xp_openstack(912c420d-b65e-4f4b-ae21-47055990a5aa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:29 crc kubenswrapper[4879]: E1125 14:45:29.925419 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" podUID="912c420d-b65e-4f4b-ae21-47055990a5aa" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.785072 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.785594 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qzsjr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.786841 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.828217 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.830828 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d82p4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(c1814b22-d1b3-4426-9fa2-f613640f63e8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.831238 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.831939 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" Nov 25 14:45:30 
crc kubenswrapper[4879]: E1125 14:45:30.845147 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.845301 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m68f8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-p5hts_openstack(95df700a-7290-4560-8765-0c4b1053aff4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:30 crc kubenswrapper[4879]: E1125 14:45:30.846458 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" podUID="95df700a-7290-4560-8765-0c4b1053aff4" Nov 25 14:45:31 crc kubenswrapper[4879]: E1125 14:45:31.836082 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.166489 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = 
Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.166595 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.166663 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9t48r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-l826d_openstack(e78f883b-647e-4d51-ab69-891982a93b03): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.166730 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r276d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-gtnf9_openstack(a0786b39-b3fc-4b83-98ef-aefee1014557): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.167883 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" podUID="e78f883b-647e-4d51-ab69-891982a93b03" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.167941 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" podUID="a0786b39-b3fc-4b83-98ef-aefee1014557" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.188408 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.188809 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-szfr8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(5f98073f-daa8-4796-955e-2f7d767d9125): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.189998 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.313647 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.329151 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.421683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config\") pod \"912c420d-b65e-4f4b-ae21-47055990a5aa\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.421777 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m68f8\" (UniqueName: \"kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8\") pod \"95df700a-7290-4560-8765-0c4b1053aff4\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.421861 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc\") pod \"95df700a-7290-4560-8765-0c4b1053aff4\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.421921 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh67w\" (UniqueName: \"kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w\") pod \"912c420d-b65e-4f4b-ae21-47055990a5aa\" (UID: \"912c420d-b65e-4f4b-ae21-47055990a5aa\") " Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.421965 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config\") pod \"95df700a-7290-4560-8765-0c4b1053aff4\" (UID: \"95df700a-7290-4560-8765-0c4b1053aff4\") " Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.422900 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config" (OuterVolumeSpecName: "config") pod "912c420d-b65e-4f4b-ae21-47055990a5aa" (UID: "912c420d-b65e-4f4b-ae21-47055990a5aa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.422913 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "95df700a-7290-4560-8765-0c4b1053aff4" (UID: "95df700a-7290-4560-8765-0c4b1053aff4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.423189 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/912c420d-b65e-4f4b-ae21-47055990a5aa-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.423214 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.423301 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config" (OuterVolumeSpecName: "config") pod "95df700a-7290-4560-8765-0c4b1053aff4" (UID: "95df700a-7290-4560-8765-0c4b1053aff4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.428785 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w" (OuterVolumeSpecName: "kube-api-access-hh67w") pod "912c420d-b65e-4f4b-ae21-47055990a5aa" (UID: "912c420d-b65e-4f4b-ae21-47055990a5aa"). InnerVolumeSpecName "kube-api-access-hh67w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.429272 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8" (OuterVolumeSpecName: "kube-api-access-m68f8") pod "95df700a-7290-4560-8765-0c4b1053aff4" (UID: "95df700a-7290-4560-8765-0c4b1053aff4"). InnerVolumeSpecName "kube-api-access-m68f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.524556 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m68f8\" (UniqueName: \"kubernetes.io/projected/95df700a-7290-4560-8765-0c4b1053aff4-kube-api-access-m68f8\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.524600 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh67w\" (UniqueName: \"kubernetes.io/projected/912c420d-b65e-4f4b-ae21-47055990a5aa-kube-api-access-hh67w\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.524618 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95df700a-7290-4560-8765-0c4b1053aff4-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.680564 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:45:33 crc kubenswrapper[4879]: W1125 14:45:33.683116 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda19706bc_9684_4f70_a0e8_9108014cac2f.slice/crio-659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af WatchSource:0}: Error finding container 659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af: Status 404 returned error can't find the container with id 659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.847037 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.855116 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.863644 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.868341 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"57c000a8-9862-4518-87aa-d818a118973c","Type":"ContainerStarted","Data":"c7fb3a736b89e1c517e2f412d6f2d74e950e7bf107319bbacf17832e99c8e1f0"} Nov 25 14:45:33 crc kubenswrapper[4879]: W1125 14:45:33.873236 4879 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod821c14ef_2ea4_488d_84f9_2234a6e27447.slice/crio-c73f12ccd9ac7776c7a99ae48420cf07b60b7231ba61e30eed05d67cb274e60a WatchSource:0}: Error finding container c73f12ccd9ac7776c7a99ae48420cf07b60b7231ba61e30eed05d67cb274e60a: Status 404 returned error can't find the container with id c73f12ccd9ac7776c7a99ae48420cf07b60b7231ba61e30eed05d67cb274e60a Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.874555 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" event={"ID":"912c420d-b65e-4f4b-ae21-47055990a5aa","Type":"ContainerDied","Data":"48af932948c65fce01fa531c62c55266d253a36e3d83a610515fe65d24039afd"} Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.874584 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-q94xp" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.880111 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerStarted","Data":"659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af"} Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.884985 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" event={"ID":"95df700a-7290-4560-8765-0c4b1053aff4","Type":"ContainerDied","Data":"51fe94c389810845a19cc345d05f217eea548d4fb5a201a096b3e22035946f59"} Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.885188 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p5hts" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.887293 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" podUID="a0786b39-b3fc-4b83-98ef-aefee1014557" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.887577 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" Nov 25 14:45:33 crc kubenswrapper[4879]: E1125 14:45:33.887644 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" podUID="e78f883b-647e-4d51-ab69-891982a93b03" Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.932996 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:33 crc kubenswrapper[4879]: I1125 14:45:33.944875 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-q94xp"] Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.049769 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.069310 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p5hts"] Nov 
25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.079311 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.521108 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.896851 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w" event={"ID":"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff","Type":"ContainerStarted","Data":"3da15c460697077097ce1a124ca5f4c32832a82b8f9eb63d688851cf7ec8c3b9"} Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.900179 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerStarted","Data":"f249f05397b38fbb3270070c2671f4c759574fa503fc84f3dd1c0488218071dd"} Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.902747 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerStarted","Data":"c67064a374f4c5f4abf06b24674bf19e46cdaefe83ab057e721853f6b7b3b0a0"} Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.906231 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerStarted","Data":"850b5152ff08503da0ba810245f0c46534854023d505e357b1d2bd961a9588a8"} Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.907844 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerStarted","Data":"1403afd49d3f224b05842344fa705db2dabb2e32583b114ad65cecaab8f2f384"} Nov 25 14:45:34 crc kubenswrapper[4879]: I1125 14:45:34.909787 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerStarted","Data":"c73f12ccd9ac7776c7a99ae48420cf07b60b7231ba61e30eed05d67cb274e60a"} Nov 25 14:45:35 crc kubenswrapper[4879]: I1125 14:45:35.653781 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="912c420d-b65e-4f4b-ae21-47055990a5aa" path="/var/lib/kubelet/pods/912c420d-b65e-4f4b-ae21-47055990a5aa/volumes" Nov 25 14:45:35 crc kubenswrapper[4879]: I1125 14:45:35.654189 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95df700a-7290-4560-8765-0c4b1053aff4" path="/var/lib/kubelet/pods/95df700a-7290-4560-8765-0c4b1053aff4/volumes" Nov 25 14:45:37 crc kubenswrapper[4879]: I1125 14:45:37.850666 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:45:37 crc kubenswrapper[4879]: I1125 14:45:37.852749 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:37 crc kubenswrapper[4879]: I1125 14:45:37.857411 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 25 14:45:37 crc kubenswrapper[4879]: I1125 14:45:37.862532 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.007981 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.008874 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.008961 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87f6g\" (UniqueName: \"kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.008998 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.009026 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.009169 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.009328 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.037682 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.041533 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.044644 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.065315 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118178 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118281 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87f6g\" (UniqueName: \"kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118309 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118333 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118390 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118420 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtjfk\" (UniqueName: \"kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118466 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " 
pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118512 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.118533 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.119691 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.119991 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.120961 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.126175 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.126213 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.139174 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87f6g\" (UniqueName: \"kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g\") pod \"ovn-controller-metrics-q22r5\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.177102 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.220241 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.220416 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtjfk\" (UniqueName: \"kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.220781 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.220817 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.221109 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.221762 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.221894 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.221889 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.251178 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.254604 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.263931 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.272770 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.283794 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtjfk\" (UniqueName: \"kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk\") pod \"dnsmasq-dns-5bf47b49b7-cm4j8\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.322247 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.322364 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.322407 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.322425 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw7rt\" (UniqueName: \"kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.322451 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.367925 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.425115 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.425528 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw7rt\" (UniqueName: \"kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.425571 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.425610 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.426084 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.426365 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.426806 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.427245 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.427416 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 
14:45:38.444760 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw7rt\" (UniqueName: \"kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt\") pod \"dnsmasq-dns-8554648995-2pn6h\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:38 crc kubenswrapper[4879]: I1125 14:45:38.628937 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.455427 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.551186 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config\") pod \"a0786b39-b3fc-4b83-98ef-aefee1014557\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.551282 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc\") pod \"a0786b39-b3fc-4b83-98ef-aefee1014557\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.551378 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r276d\" (UniqueName: \"kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d\") pod \"a0786b39-b3fc-4b83-98ef-aefee1014557\" (UID: \"a0786b39-b3fc-4b83-98ef-aefee1014557\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.552626 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0786b39-b3fc-4b83-98ef-aefee1014557" (UID: "a0786b39-b3fc-4b83-98ef-aefee1014557"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.552618 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config" (OuterVolumeSpecName: "config") pod "a0786b39-b3fc-4b83-98ef-aefee1014557" (UID: "a0786b39-b3fc-4b83-98ef-aefee1014557"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.558212 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d" (OuterVolumeSpecName: "kube-api-access-r276d") pod "a0786b39-b3fc-4b83-98ef-aefee1014557" (UID: "a0786b39-b3fc-4b83-98ef-aefee1014557"). InnerVolumeSpecName "kube-api-access-r276d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.652813 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.652852 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0786b39-b3fc-4b83-98ef-aefee1014557-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.652868 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r276d\" (UniqueName: \"kubernetes.io/projected/a0786b39-b3fc-4b83-98ef-aefee1014557-kube-api-access-r276d\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.830663 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.960208 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config\") pod \"e78f883b-647e-4d51-ab69-891982a93b03\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.960588 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc\") pod \"e78f883b-647e-4d51-ab69-891982a93b03\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.960745 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t48r\" (UniqueName: \"kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r\") pod \"e78f883b-647e-4d51-ab69-891982a93b03\" (UID: \"e78f883b-647e-4d51-ab69-891982a93b03\") " Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.962766 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config" (OuterVolumeSpecName: "config") pod "e78f883b-647e-4d51-ab69-891982a93b03" (UID: "e78f883b-647e-4d51-ab69-891982a93b03"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.963226 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e78f883b-647e-4d51-ab69-891982a93b03" (UID: "e78f883b-647e-4d51-ab69-891982a93b03"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.965459 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r" (OuterVolumeSpecName: "kube-api-access-9t48r") pod "e78f883b-647e-4d51-ab69-891982a93b03" (UID: "e78f883b-647e-4d51-ab69-891982a93b03"). InnerVolumeSpecName "kube-api-access-9t48r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.970766 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" event={"ID":"e78f883b-647e-4d51-ab69-891982a93b03","Type":"ContainerDied","Data":"9443b3e49e79c117506bcc25af2b7f676d7538620d03eea04cf2dc0b6cd83211"} Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.970873 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-l826d" Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.975233 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" event={"ID":"a0786b39-b3fc-4b83-98ef-aefee1014557","Type":"ContainerDied","Data":"9cc521f2ca3ccc9e9fec745226fb2b3b121286271b3ae7c85b5805aa88ec8318"} Nov 25 14:45:39 crc kubenswrapper[4879]: I1125 14:45:39.975333 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-gtnf9" Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.014944 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.026318 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-gtnf9"] Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.040755 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.046370 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-l826d"] Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.062568 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.062608 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e78f883b-647e-4d51-ab69-891982a93b03-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:40 crc kubenswrapper[4879]: I1125 14:45:40.062623 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t48r\" (UniqueName: \"kubernetes.io/projected/e78f883b-647e-4d51-ab69-891982a93b03-kube-api-access-9t48r\") on node \"crc\" DevicePath \"\"" Nov 25 14:45:41 crc kubenswrapper[4879]: I1125 14:45:41.654883 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0786b39-b3fc-4b83-98ef-aefee1014557" path="/var/lib/kubelet/pods/a0786b39-b3fc-4b83-98ef-aefee1014557/volumes" Nov 25 14:45:41 crc kubenswrapper[4879]: I1125 14:45:41.655514 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e78f883b-647e-4d51-ab69-891982a93b03" path="/var/lib/kubelet/pods/e78f883b-647e-4d51-ab69-891982a93b03/volumes" Nov 25 14:45:43 crc kubenswrapper[4879]: I1125 14:45:43.185831 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:45:44 crc kubenswrapper[4879]: I1125 14:45:44.004921 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-2pn6h" event={"ID":"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61","Type":"ContainerStarted","Data":"c512712c55265b6f47eccad060c050fad98fe90bddb1fcfd062870db913d5e2c"} Nov 25 14:45:44 crc kubenswrapper[4879]: I1125 14:45:44.008146 4879 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:45:44 crc kubenswrapper[4879]: I1125 14:45:44.075362 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:45:44 crc kubenswrapper[4879]: W1125 14:45:44.292397 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d972e68_542e_456d_9b40_5cf7aa4b68c7.slice/crio-53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce WatchSource:0}: Error finding container 53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce: Status 404 returned error can't find the container with id 53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce Nov 25 14:45:45 crc kubenswrapper[4879]: I1125 14:45:45.021374 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-q22r5" event={"ID":"1d972e68-542e-456d-9b40-5cf7aa4b68c7","Type":"ContainerStarted","Data":"53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce"} Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.038100 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" event={"ID":"03f3f243-b385-4a40-aaa9-dc9f97801505","Type":"ContainerStarted","Data":"61cb4eb79e056f543b44956d89088284451d89a323b4ab6eea6aa8bb5d3ce24b"} Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.409084 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.409172 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.409230 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.409947 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:45:47 crc kubenswrapper[4879]: I1125 14:45:47.410014 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7" gracePeriod=600 Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.049286 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7" exitCode=0 Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.049369 4879 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7"} Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.049872 4879 scope.go:117] "RemoveContainer" containerID="46313fa375227d4de77ccdffe103fa35d82fc54f0a327407b56caa844d260bef" Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.051465 4879 generic.go:334] "Generic (PLEG): container finished" podID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerID="c67064a374f4c5f4abf06b24674bf19e46cdaefe83ab057e721853f6b7b3b0a0" exitCode=0 Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.051576 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerDied","Data":"c67064a374f4c5f4abf06b24674bf19e46cdaefe83ab057e721853f6b7b3b0a0"} Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.054297 4879 generic.go:334] "Generic (PLEG): container finished" podID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerID="bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540" exitCode=0 Nov 25 14:45:48 crc kubenswrapper[4879]: I1125 14:45:48.054336 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerDied","Data":"bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540"} Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.066465 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerStarted","Data":"4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1"} Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.069205 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"57c000a8-9862-4518-87aa-d818a118973c","Type":"ContainerStarted","Data":"1093ba44c36e637d70c8ff92badde094bc3eaebb6c3b1cca2915b1d61de7025a"} Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.069322 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.071212 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerStarted","Data":"59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc"} Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.073093 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w" event={"ID":"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff","Type":"ContainerStarted","Data":"f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2"} Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.073266 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-gml5w" Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.098104 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=29.985246884 podStartE2EDuration="41.098081296s" podCreationTimestamp="2025-11-25 14:45:08 +0000 UTC" firstStartedPulling="2025-11-25 14:45:33.174759754 +0000 UTC m=+1224.778172825" lastFinishedPulling="2025-11-25 14:45:44.287594166 +0000 UTC m=+1235.891007237" 
observedRunningTime="2025-11-25 14:45:49.090342402 +0000 UTC m=+1240.693755483" watchObservedRunningTime="2025-11-25 14:45:49.098081296 +0000 UTC m=+1240.701494357" Nov 25 14:45:49 crc kubenswrapper[4879]: I1125 14:45:49.113798 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-gml5w" podStartSLOduration=23.634391608 podStartE2EDuration="36.113772932s" podCreationTimestamp="2025-11-25 14:45:13 +0000 UTC" firstStartedPulling="2025-11-25 14:45:33.908017921 +0000 UTC m=+1225.511430992" lastFinishedPulling="2025-11-25 14:45:46.387399255 +0000 UTC m=+1237.990812316" observedRunningTime="2025-11-25 14:45:49.111077917 +0000 UTC m=+1240.714490988" watchObservedRunningTime="2025-11-25 14:45:49.113772932 +0000 UTC m=+1240.717186003" Nov 25 14:45:50 crc kubenswrapper[4879]: I1125 14:45:50.083604 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.101716 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerStarted","Data":"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.102268 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerStarted","Data":"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.102294 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.104956 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerStarted","Data":"73e8dd61029c94294aeb86b9c378ec7ea7b10d602ce332a8330879bb615f72bb"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.107038 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerStarted","Data":"ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.107713 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.109460 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerStarted","Data":"cf474471d6f23adf5b05a9b2ace8992e9dc0e24725d55440de35a374bae09a8a"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.111217 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerStarted","Data":"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.112960 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-q22r5" 
event={"ID":"1d972e68-542e-456d-9b40-5cf7aa4b68c7","Type":"ContainerStarted","Data":"dfa8795114b5f0a0a7289db1a6c75a7e18bf770c9753b6f30a6cc638d3462c39"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.115079 4879 generic.go:334] "Generic (PLEG): container finished" podID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerID="d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748" exitCode=0 Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.115148 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" event={"ID":"03f3f243-b385-4a40-aaa9-dc9f97801505","Type":"ContainerDied","Data":"d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.117096 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.118092 4879 generic.go:334] "Generic (PLEG): container finished" podID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerID="2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646" exitCode=0 Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.118156 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-2pn6h" event={"ID":"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61","Type":"ContainerDied","Data":"2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.123975 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-2n7ff" podStartSLOduration=29.356274435 podStartE2EDuration="38.123951456s" podCreationTimestamp="2025-11-25 14:45:13 +0000 UTC" firstStartedPulling="2025-11-25 14:45:34.03568026 +0000 UTC m=+1225.639093331" lastFinishedPulling="2025-11-25 14:45:42.803357281 +0000 UTC m=+1234.406770352" observedRunningTime="2025-11-25 14:45:51.122034983 +0000 UTC m=+1242.725448064" watchObservedRunningTime="2025-11-25 14:45:51.123951456 +0000 UTC m=+1242.727364537" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.127663 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerStarted","Data":"350d4e7b8e9d631147fe9a755af14db63aa3e7ee234ef7375cd6205d968d2327"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.130104 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerStarted","Data":"3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8"} Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.158476 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-q22r5" podStartSLOduration=8.725513527 podStartE2EDuration="14.158458543s" podCreationTimestamp="2025-11-25 14:45:37 +0000 UTC" firstStartedPulling="2025-11-25 14:45:44.303334972 +0000 UTC m=+1235.906748033" lastFinishedPulling="2025-11-25 14:45:49.736279978 +0000 UTC m=+1241.339693049" observedRunningTime="2025-11-25 14:45:51.141922794 +0000 UTC m=+1242.745335865" watchObservedRunningTime="2025-11-25 14:45:51.158458543 +0000 UTC m=+1242.761871624" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.168993 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=22.063376541 podStartE2EDuration="37.168972934s" 
podCreationTimestamp="2025-11-25 14:45:14 +0000 UTC" firstStartedPulling="2025-11-25 14:45:34.639364655 +0000 UTC m=+1226.242777716" lastFinishedPulling="2025-11-25 14:45:49.744961028 +0000 UTC m=+1241.348374109" observedRunningTime="2025-11-25 14:45:51.164774007 +0000 UTC m=+1242.768187078" watchObservedRunningTime="2025-11-25 14:45:51.168972934 +0000 UTC m=+1242.772386015" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.284967 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=26.010174495 podStartE2EDuration="41.284948429s" podCreationTimestamp="2025-11-25 14:45:10 +0000 UTC" firstStartedPulling="2025-11-25 14:45:33.8784013 +0000 UTC m=+1225.481814371" lastFinishedPulling="2025-11-25 14:45:49.153175244 +0000 UTC m=+1240.756588305" observedRunningTime="2025-11-25 14:45:51.28246912 +0000 UTC m=+1242.885882191" watchObservedRunningTime="2025-11-25 14:45:51.284948429 +0000 UTC m=+1242.888361500" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.322808 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=43.798928806 podStartE2EDuration="44.322791178s" podCreationTimestamp="2025-11-25 14:45:07 +0000 UTC" firstStartedPulling="2025-11-25 14:45:33.688040533 +0000 UTC m=+1225.291453614" lastFinishedPulling="2025-11-25 14:45:34.211902915 +0000 UTC m=+1225.815315986" observedRunningTime="2025-11-25 14:45:51.317837751 +0000 UTC m=+1242.921250822" watchObservedRunningTime="2025-11-25 14:45:51.322791178 +0000 UTC m=+1242.926204249" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.344990 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=18.405634457 podStartE2EDuration="34.344974733s" podCreationTimestamp="2025-11-25 14:45:17 +0000 UTC" firstStartedPulling="2025-11-25 14:45:33.861989486 +0000 UTC m=+1225.465402557" lastFinishedPulling="2025-11-25 14:45:49.801329762 +0000 UTC m=+1241.404742833" observedRunningTime="2025-11-25 14:45:51.337899997 +0000 UTC m=+1242.941313068" watchObservedRunningTime="2025-11-25 14:45:51.344974733 +0000 UTC m=+1242.948387804" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.780893 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:51 crc kubenswrapper[4879]: I1125 14:45:51.823194 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.117424 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.140083 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" event={"ID":"03f3f243-b385-4a40-aaa9-dc9f97801505","Type":"ContainerStarted","Data":"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367"} Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.140178 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.143038 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-2pn6h" 
event={"ID":"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61","Type":"ContainerStarted","Data":"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81"} Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.143077 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.145464 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerStarted","Data":"a476d9d71c0fa760bf1e708c942cc87f51d050d16da32ce40f524f553587e4f7"} Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.145788 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.146436 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.160772 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" podStartSLOduration=12.555819549 podStartE2EDuration="14.160752488s" podCreationTimestamp="2025-11-25 14:45:38 +0000 UTC" firstStartedPulling="2025-11-25 14:45:46.397609278 +0000 UTC m=+1238.001022349" lastFinishedPulling="2025-11-25 14:45:48.002542227 +0000 UTC m=+1239.605955288" observedRunningTime="2025-11-25 14:45:52.158649859 +0000 UTC m=+1243.762062930" watchObservedRunningTime="2025-11-25 14:45:52.160752488 +0000 UTC m=+1243.764165559" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.167611 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:52 crc kubenswrapper[4879]: I1125 14:45:52.210196 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-2pn6h" podStartSLOduration=10.342859723 podStartE2EDuration="14.210179688s" podCreationTimestamp="2025-11-25 14:45:38 +0000 UTC" firstStartedPulling="2025-11-25 14:45:44.029170502 +0000 UTC m=+1235.632583573" lastFinishedPulling="2025-11-25 14:45:47.896490477 +0000 UTC m=+1239.499903538" observedRunningTime="2025-11-25 14:45:52.202773013 +0000 UTC m=+1243.806186084" watchObservedRunningTime="2025-11-25 14:45:52.210179688 +0000 UTC m=+1243.813592759" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.189378 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.189436 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.460734 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.462370 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.467929 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.470741 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.470992 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.471214 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.471364 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-wztmv" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624111 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtrst\" (UniqueName: \"kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624172 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624363 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624422 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624492 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.624518 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: 
I1125 14:45:53.725512 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtrst\" (UniqueName: \"kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725569 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725654 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725718 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725738 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725768 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.725789 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.726768 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.726889 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.727107 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.734030 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.734095 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.736945 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.754509 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtrst\" (UniqueName: \"kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst\") pod \"ovn-northd-0\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " pod="openstack/ovn-northd-0" Nov 25 14:45:53 crc kubenswrapper[4879]: I1125 14:45:53.781863 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 14:45:54 crc kubenswrapper[4879]: I1125 14:45:54.060080 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 25 14:45:54 crc kubenswrapper[4879]: I1125 14:45:54.258243 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:45:54 crc kubenswrapper[4879]: W1125 14:45:54.268059 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f130d5_685b_4b37_89bb_b9536109c8fb.slice/crio-d3722bd731d541dba0d192346e66860aa03ec2135d10ce36b86d37c8da54bd50 WatchSource:0}: Error finding container d3722bd731d541dba0d192346e66860aa03ec2135d10ce36b86d37c8da54bd50: Status 404 returned error can't find the container with id d3722bd731d541dba0d192346e66860aa03ec2135d10ce36b86d37c8da54bd50 Nov 25 14:45:55 crc kubenswrapper[4879]: I1125 14:45:55.167439 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerStarted","Data":"d3722bd731d541dba0d192346e66860aa03ec2135d10ce36b86d37c8da54bd50"} Nov 25 14:45:55 crc kubenswrapper[4879]: E1125 14:45:55.359560 4879 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.190:50486->38.102.83.190:33827: write tcp 38.102.83.190:50486->38.102.83.190:33827: write: connection reset by peer Nov 25 14:45:57 crc kubenswrapper[4879]: I1125 14:45:57.181540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerStarted","Data":"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c"} Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.191816 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" 
event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerStarted","Data":"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd"} Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.192174 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.221252 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.029108732 podStartE2EDuration="5.221225791s" podCreationTimestamp="2025-11-25 14:45:53 +0000 UTC" firstStartedPulling="2025-11-25 14:45:54.269962367 +0000 UTC m=+1245.873375438" lastFinishedPulling="2025-11-25 14:45:56.462079426 +0000 UTC m=+1248.065492497" observedRunningTime="2025-11-25 14:45:58.213326593 +0000 UTC m=+1249.816739674" watchObservedRunningTime="2025-11-25 14:45:58.221225791 +0000 UTC m=+1249.824638872" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.371356 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.631155 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.685587 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.687138 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.719672 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:45:58 crc kubenswrapper[4879]: I1125 14:45:58.958449 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 25 14:45:59 crc kubenswrapper[4879]: I1125 14:45:59.199633 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="dnsmasq-dns" containerID="cri-o://027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367" gracePeriod=10 Nov 25 14:45:59 crc kubenswrapper[4879]: I1125 14:45:59.283333 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.094845 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.206066 4879 generic.go:334] "Generic (PLEG): container finished" podID="5f98073f-daa8-4796-955e-2f7d767d9125" containerID="72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57" exitCode=0 Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.206155 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerDied","Data":"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57"} Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.207525 4879 generic.go:334] "Generic (PLEG): container finished" podID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerID="027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367" exitCode=0 Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.207659 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" event={"ID":"03f3f243-b385-4a40-aaa9-dc9f97801505","Type":"ContainerDied","Data":"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367"} Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.207736 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.208272 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-cm4j8" event={"ID":"03f3f243-b385-4a40-aaa9-dc9f97801505","Type":"ContainerDied","Data":"61cb4eb79e056f543b44956d89088284451d89a323b4ab6eea6aa8bb5d3ce24b"} Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.208310 4879 scope.go:117] "RemoveContainer" containerID="027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.228245 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc\") pod \"03f3f243-b385-4a40-aaa9-dc9f97801505\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.228399 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb\") pod \"03f3f243-b385-4a40-aaa9-dc9f97801505\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.228567 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtjfk\" (UniqueName: \"kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk\") pod \"03f3f243-b385-4a40-aaa9-dc9f97801505\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.228649 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config\") pod \"03f3f243-b385-4a40-aaa9-dc9f97801505\" (UID: \"03f3f243-b385-4a40-aaa9-dc9f97801505\") " Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.249816 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk" (OuterVolumeSpecName: "kube-api-access-qtjfk") pod 
"03f3f243-b385-4a40-aaa9-dc9f97801505" (UID: "03f3f243-b385-4a40-aaa9-dc9f97801505"). InnerVolumeSpecName "kube-api-access-qtjfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.269633 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "03f3f243-b385-4a40-aaa9-dc9f97801505" (UID: "03f3f243-b385-4a40-aaa9-dc9f97801505"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.272579 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config" (OuterVolumeSpecName: "config") pod "03f3f243-b385-4a40-aaa9-dc9f97801505" (UID: "03f3f243-b385-4a40-aaa9-dc9f97801505"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.274645 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "03f3f243-b385-4a40-aaa9-dc9f97801505" (UID: "03f3f243-b385-4a40-aaa9-dc9f97801505"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.333294 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.333326 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.333337 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtjfk\" (UniqueName: \"kubernetes.io/projected/03f3f243-b385-4a40-aaa9-dc9f97801505-kube-api-access-qtjfk\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.333347 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f3f243-b385-4a40-aaa9-dc9f97801505-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.359752 4879 scope.go:117] "RemoveContainer" containerID="d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.386469 4879 scope.go:117] "RemoveContainer" containerID="027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367" Nov 25 14:46:00 crc kubenswrapper[4879]: E1125 14:46:00.387335 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367\": container with ID starting with 027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367 not found: ID does not exist" containerID="027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.387385 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367"} err="failed to get container status \"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367\": rpc error: code = NotFound desc = could not find container \"027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367\": container with ID starting with 027f381214465bd3f891d3cbe940aa7a64f84ade2e83a14ca00978c51ea6f367 not found: ID does not exist" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.387420 4879 scope.go:117] "RemoveContainer" containerID="d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748" Nov 25 14:46:00 crc kubenswrapper[4879]: E1125 14:46:00.387678 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748\": container with ID starting with d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748 not found: ID does not exist" containerID="d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.387704 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748"} err="failed to get container status \"d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748\": rpc error: code = NotFound desc = could not find container \"d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748\": container with ID starting with d444f4db4f3fba0e24b6d345b528c930aa3c9baf00d6cb778e361ccc01963748 not found: ID does not exist" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.539455 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.544682 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-cm4j8"] Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.912408 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.941529 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:46:00 crc kubenswrapper[4879]: E1125 14:46:00.941846 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="init" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.941860 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="init" Nov 25 14:46:00 crc kubenswrapper[4879]: E1125 14:46:00.941872 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="dnsmasq-dns" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.941877 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="dnsmasq-dns" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.942040 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" containerName="dnsmasq-dns" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.942862 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:00 crc kubenswrapper[4879]: I1125 14:46:00.963323 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.051314 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.051374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.051394 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.051424 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgv77\" (UniqueName: \"kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.051462 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.152558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.152637 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.152664 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.152710 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgv77\" 
(UniqueName: \"kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.152738 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.153793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.153936 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.153920 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.154143 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.171733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgv77\" (UniqueName: \"kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77\") pod \"dnsmasq-dns-b8fbc5445-xb84j\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.217780 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerStarted","Data":"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e"} Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.237532 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371980.617264 podStartE2EDuration="56.237511906s" podCreationTimestamp="2025-11-25 14:45:05 +0000 UTC" firstStartedPulling="2025-11-25 14:45:07.778289103 +0000 UTC m=+1199.381702174" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:01.235054199 +0000 UTC m=+1252.838467280" watchObservedRunningTime="2025-11-25 14:46:01.237511906 +0000 UTC m=+1252.840924977" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.269347 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.653506 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f3f243-b385-4a40-aaa9-dc9f97801505" path="/var/lib/kubelet/pods/03f3f243-b385-4a40-aaa9-dc9f97801505/volumes" Nov 25 14:46:01 crc kubenswrapper[4879]: I1125 14:46:01.669312 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.140911 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.146468 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.148345 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.148421 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.148604 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.148607 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-746hf" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.160729 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.231918 4879 generic.go:334] "Generic (PLEG): container finished" podID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerID="77b734dac422c939eb4feed9ab24da67b2dcb6856f1c0be62ce813ed6db75a14" exitCode=0 Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.231958 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" event={"ID":"89f9ccb7-32af-4de5-b9a7-8771989d8a1b","Type":"ContainerDied","Data":"77b734dac422c939eb4feed9ab24da67b2dcb6856f1c0be62ce813ed6db75a14"} Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.231982 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" event={"ID":"89f9ccb7-32af-4de5-b9a7-8771989d8a1b","Type":"ContainerStarted","Data":"ade4ad8f4e3bf9fbbea988b9fd44da7b05ec6f3bc912a00b0e57e7572b02a11c"} Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.273799 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.273848 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.273884 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock\") pod \"swift-storage-0\" (UID: 
\"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.274139 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jv8d\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.274295 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.376250 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jv8d\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.376991 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.377118 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.377166 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.377209 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.377858 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.377953 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.377997 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.378019 4879 projected.go:194] Error preparing data for 
projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.378081 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:02.878053844 +0000 UTC m=+1254.481466915 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.378091 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.400814 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jv8d\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.404455 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: I1125 14:46:02.886457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.886617 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.886633 4879 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:02 crc kubenswrapper[4879]: E1125 14:46:02.886666 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:03.886653262 +0000 UTC m=+1255.490066333 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:03 crc kubenswrapper[4879]: I1125 14:46:03.240717 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" event={"ID":"89f9ccb7-32af-4de5-b9a7-8771989d8a1b","Type":"ContainerStarted","Data":"41c2542e53d215903d6489c638cfa4d00898988a7f57f64c5234d8ed55e5482a"} Nov 25 14:46:03 crc kubenswrapper[4879]: I1125 14:46:03.240848 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:03 crc kubenswrapper[4879]: I1125 14:46:03.260922 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podStartSLOduration=3.260902388 podStartE2EDuration="3.260902388s" podCreationTimestamp="2025-11-25 14:46:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:03.256013872 +0000 UTC m=+1254.859426943" watchObservedRunningTime="2025-11-25 14:46:03.260902388 +0000 UTC m=+1254.864315459" Nov 25 14:46:03 crc kubenswrapper[4879]: I1125 14:46:03.901961 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:03 crc kubenswrapper[4879]: E1125 14:46:03.902266 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:03 crc kubenswrapper[4879]: E1125 14:46:03.902379 4879 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:03 crc kubenswrapper[4879]: E1125 14:46:03.902451 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:05.902425521 +0000 UTC m=+1257.505838602 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:05 crc kubenswrapper[4879]: I1125 14:46:05.933926 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:05 crc kubenswrapper[4879]: E1125 14:46:05.934193 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:05 crc kubenswrapper[4879]: E1125 14:46:05.935250 4879 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:05 crc kubenswrapper[4879]: E1125 14:46:05.935339 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:09.935314926 +0000 UTC m=+1261.538727997 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.079992 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-cjhrw"] Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.081148 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.083313 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.083359 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.083313 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.172881 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-cjhrw"] Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.246300 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.246370 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb6zr\" (UniqueName: \"kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.246469 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.246957 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.247194 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.247242 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.247283 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 
14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.348812 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.348874 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb6zr\" (UniqueName: \"kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.348951 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.348984 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.349063 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.349092 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.349134 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.350991 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.351049 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.351438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.358692 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.359101 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.360765 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.380887 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb6zr\" (UniqueName: \"kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr\") pod \"swift-ring-rebalance-cjhrw\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.407774 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:06 crc kubenswrapper[4879]: I1125 14:46:06.887369 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-cjhrw"] Nov 25 14:46:06 crc kubenswrapper[4879]: W1125 14:46:06.891311 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod480a7b19_c9e0_41c2_b4cd_af083572f083.slice/crio-07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0 WatchSource:0}: Error finding container 07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0: Status 404 returned error can't find the container with id 07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0 Nov 25 14:46:07 crc kubenswrapper[4879]: I1125 14:46:07.276726 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-cjhrw" event={"ID":"480a7b19-c9e0-41c2-b4cd-af083572f083","Type":"ContainerStarted","Data":"07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0"} Nov 25 14:46:07 crc kubenswrapper[4879]: I1125 14:46:07.280664 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 25 14:46:07 crc kubenswrapper[4879]: I1125 14:46:07.280756 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 25 14:46:08 crc kubenswrapper[4879]: I1125 14:46:08.835317 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 25 14:46:10 crc kubenswrapper[4879]: I1125 14:46:10.024432 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:10 crc kubenswrapper[4879]: E1125 14:46:10.024655 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:10 crc kubenswrapper[4879]: E1125 14:46:10.024686 4879 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:10 crc kubenswrapper[4879]: E1125 14:46:10.024759 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:18.024736399 +0000 UTC m=+1269.628149470 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:11 crc kubenswrapper[4879]: I1125 14:46:11.271464 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:46:11 crc kubenswrapper[4879]: I1125 14:46:11.330323 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:46:11 crc kubenswrapper[4879]: I1125 14:46:11.330548 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-2pn6h" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="dnsmasq-dns" containerID="cri-o://f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81" gracePeriod=10 Nov 25 14:46:11 crc kubenswrapper[4879]: I1125 14:46:11.917234 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.057282 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb\") pod \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.057368 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb\") pod \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.057417 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw7rt\" (UniqueName: \"kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt\") pod \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.057536 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc\") pod \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.057567 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config\") pod \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\" (UID: \"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61\") " Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.064191 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt" (OuterVolumeSpecName: "kube-api-access-vw7rt") pod "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" (UID: "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61"). InnerVolumeSpecName "kube-api-access-vw7rt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.100202 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" (UID: "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.104602 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" (UID: "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.115609 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config" (OuterVolumeSpecName: "config") pod "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" (UID: "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.119652 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" (UID: "232b69b4-9ca3-4a89-bb4d-b6ca430cbb61"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.159948 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.159989 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.160004 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.160016 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.160025 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw7rt\" (UniqueName: \"kubernetes.io/projected/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61-kube-api-access-vw7rt\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.315206 4879 generic.go:334] "Generic (PLEG): container finished" podID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerID="f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81" exitCode=0 Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.315263 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-2pn6h" 
event={"ID":"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61","Type":"ContainerDied","Data":"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81"} Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.315294 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-2pn6h" event={"ID":"232b69b4-9ca3-4a89-bb4d-b6ca430cbb61","Type":"ContainerDied","Data":"c512712c55265b6f47eccad060c050fad98fe90bddb1fcfd062870db913d5e2c"} Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.315315 4879 scope.go:117] "RemoveContainer" containerID="f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.315480 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-2pn6h" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.352988 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.359880 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-2pn6h"] Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.424617 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 25 14:46:12 crc kubenswrapper[4879]: I1125 14:46:12.530860 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 25 14:46:13 crc kubenswrapper[4879]: I1125 14:46:13.655648 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" path="/var/lib/kubelet/pods/232b69b4-9ca3-4a89-bb4d-b6ca430cbb61/volumes" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.263548 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4d84-account-create-2z2fh"] Nov 25 14:46:14 crc kubenswrapper[4879]: E1125 14:46:14.263870 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="init" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.263881 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="init" Nov 25 14:46:14 crc kubenswrapper[4879]: E1125 14:46:14.263897 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="dnsmasq-dns" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.263904 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="dnsmasq-dns" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.264058 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="232b69b4-9ca3-4a89-bb4d-b6ca430cbb61" containerName="dnsmasq-dns" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.264652 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.269552 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.272460 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4d84-account-create-2z2fh"] Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.318876 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-vlf6s"] Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.328269 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.339710 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-vlf6s"] Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.398899 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.398988 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffzs2\" (UniqueName: \"kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.399140 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.399180 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb6cc\" (UniqueName: \"kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.501145 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.501240 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffzs2\" (UniqueName: \"kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.501359 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.501415 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb6cc\" (UniqueName: \"kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.502218 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.503319 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.521920 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb6cc\" (UniqueName: \"kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc\") pod \"glance-db-create-vlf6s\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.522099 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffzs2\" (UniqueName: \"kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2\") pod \"glance-4d84-account-create-2z2fh\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.612588 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:14 crc kubenswrapper[4879]: I1125 14:46:14.645803 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.241302 4879 scope.go:117] "RemoveContainer" containerID="2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.339549 4879 scope.go:117] "RemoveContainer" containerID="f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81" Nov 25 14:46:17 crc kubenswrapper[4879]: E1125 14:46:17.340091 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81\": container with ID starting with f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81 not found: ID does not exist" containerID="f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.340173 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81"} err="failed to get container status \"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81\": rpc error: code = NotFound desc = could not find container \"f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81\": container with ID starting with f2af8fd8cfafe83504642dd750e780c1174ff9ddaaae22652989ed2d56d1fe81 not found: ID does not exist" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.340208 4879 scope.go:117] "RemoveContainer" containerID="2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646" Nov 25 14:46:17 crc kubenswrapper[4879]: E1125 14:46:17.340550 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646\": container with ID starting with 2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646 not found: ID does not exist" containerID="2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.340610 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646"} err="failed to get container status \"2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646\": rpc error: code = NotFound desc = could not find container \"2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646\": container with ID starting with 2e4196493425b9d95ac42f6305958dc14b9287adca887ad86812a8885324c646 not found: ID does not exist" Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.759583 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4d84-account-create-2z2fh"] Nov 25 14:46:17 crc kubenswrapper[4879]: W1125 14:46:17.768627 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc36e8400_c985_4fb1_aab9_8cced923f7ed.slice/crio-8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264 WatchSource:0}: Error finding container 8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264: Status 404 returned error can't find the container with id 8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264 Nov 25 14:46:17 crc kubenswrapper[4879]: I1125 14:46:17.828260 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/glance-db-create-vlf6s"] Nov 25 14:46:17 crc kubenswrapper[4879]: W1125 14:46:17.828729 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7742abe4_cffe_4f79_b818_75a364174832.slice/crio-853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c WatchSource:0}: Error finding container 853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c: Status 404 returned error can't find the container with id 853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.111405 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:18 crc kubenswrapper[4879]: E1125 14:46:18.111672 4879 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 25 14:46:18 crc kubenswrapper[4879]: E1125 14:46:18.111752 4879 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 25 14:46:18 crc kubenswrapper[4879]: E1125 14:46:18.111796 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift podName:734e5d8b-907c-4246-adca-6a05a98c0b27 nodeName:}" failed. No retries permitted until 2025-11-25 14:46:34.111782662 +0000 UTC m=+1285.715195733 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift") pod "swift-storage-0" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27") : configmap "swift-ring-files" not found Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.515031 4879 generic.go:334] "Generic (PLEG): container finished" podID="c36e8400-c985-4fb1-aab9-8cced923f7ed" containerID="60a5a0e65585a2bc5eebda61c00e943a714eb25b99f7e1142ce1d52ab77390f4" exitCode=0 Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.515072 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4d84-account-create-2z2fh" event={"ID":"c36e8400-c985-4fb1-aab9-8cced923f7ed","Type":"ContainerDied","Data":"60a5a0e65585a2bc5eebda61c00e943a714eb25b99f7e1142ce1d52ab77390f4"} Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.515143 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4d84-account-create-2z2fh" event={"ID":"c36e8400-c985-4fb1-aab9-8cced923f7ed","Type":"ContainerStarted","Data":"8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264"} Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.516343 4879 generic.go:334] "Generic (PLEG): container finished" podID="7742abe4-cffe-4f79-b818-75a364174832" containerID="c1e6e0ff9d243448044098d3232995e2d1750e73419d17539f646a12b915a82f" exitCode=0 Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.516393 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vlf6s" event={"ID":"7742abe4-cffe-4f79-b818-75a364174832","Type":"ContainerDied","Data":"c1e6e0ff9d243448044098d3232995e2d1750e73419d17539f646a12b915a82f"} Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.516411 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-db-create-vlf6s" event={"ID":"7742abe4-cffe-4f79-b818-75a364174832","Type":"ContainerStarted","Data":"853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c"} Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.517764 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-cjhrw" event={"ID":"480a7b19-c9e0-41c2-b4cd-af083572f083","Type":"ContainerStarted","Data":"66d1cb297f19e2391f50e7a7f3d03631d216851e0f2b568c9abf77918731b2c4"} Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.562243 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-cjhrw" podStartSLOduration=2.097122474 podStartE2EDuration="12.562226709s" podCreationTimestamp="2025-11-25 14:46:06 +0000 UTC" firstStartedPulling="2025-11-25 14:46:06.893152528 +0000 UTC m=+1258.496565619" lastFinishedPulling="2025-11-25 14:46:17.358256783 +0000 UTC m=+1268.961669854" observedRunningTime="2025-11-25 14:46:18.560601144 +0000 UTC m=+1270.164014225" watchObservedRunningTime="2025-11-25 14:46:18.562226709 +0000 UTC m=+1270.165639780" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.651961 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-cdz4d"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.654144 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.662564 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cdz4d"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.740683 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gml5w" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" probeResult="failure" output=< Nov 25 14:46:18 crc kubenswrapper[4879]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 25 14:46:18 crc kubenswrapper[4879]: > Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.764728 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-e239-account-create-f8pc8"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.766139 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.768238 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.784105 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-e239-account-create-f8pc8"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.824089 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcjlr\" (UniqueName: \"kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.824175 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.863240 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-7q792"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.866377 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7q792" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.874716 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7q792"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.926685 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcjlr\" (UniqueName: \"kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.926768 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.926796 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hgfb\" (UniqueName: \"kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.926844 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.927792 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.947787 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcjlr\" (UniqueName: \"kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr\") pod \"keystone-db-create-cdz4d\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.989072 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-30f0-account-create-7k4s4"] Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.990261 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:18 crc kubenswrapper[4879]: I1125 14:46:18.992323 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.001865 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-30f0-account-create-7k4s4"] Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.013320 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.030425 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.030483 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-826bd\" (UniqueName: \"kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.030527 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hgfb\" (UniqueName: \"kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.030565 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.031393 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 
14:46:19.045925 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hgfb\" (UniqueName: \"kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb\") pod \"keystone-e239-account-create-f8pc8\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.084737 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.132751 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.132817 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-826bd\" (UniqueName: \"kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.132849 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.132991 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mq9j\" (UniqueName: \"kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.136489 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.152943 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-826bd\" (UniqueName: \"kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd\") pod \"placement-db-create-7q792\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.186935 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-7q792" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.234668 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.234754 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mq9j\" (UniqueName: \"kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.237249 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.262928 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mq9j\" (UniqueName: \"kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j\") pod \"placement-30f0-account-create-7k4s4\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.319571 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.472503 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cdz4d"] Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.533276 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cdz4d" event={"ID":"33f1fc15-7c2c-4fa1-94d8-704a644db8bb","Type":"ContainerStarted","Data":"f97f2bda9b981050d7ff5b844ccd24b939a90fc3da7ee300c743c50e63f85580"} Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.576167 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-e239-account-create-f8pc8"] Nov 25 14:46:19 crc kubenswrapper[4879]: W1125 14:46:19.597144 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fc80e72_1910_4244_ba55_7046e4a9a5f1.slice/crio-09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f WatchSource:0}: Error finding container 09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f: Status 404 returned error can't find the container with id 09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.672507 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-7q792"] Nov 25 14:46:19 crc kubenswrapper[4879]: W1125 14:46:19.678995 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddab1f06c_c032_4e72_8d6b_313eb41c893e.slice/crio-84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42 WatchSource:0}: Error finding container 84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42: Status 404 returned error can't find the container with id 84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42 Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.786646 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-30f0-account-create-7k4s4"] Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.906514 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:19 crc kubenswrapper[4879]: I1125 14:46:19.936518 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.060783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb6cc\" (UniqueName: \"kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc\") pod \"7742abe4-cffe-4f79-b818-75a364174832\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.060967 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts\") pod \"7742abe4-cffe-4f79-b818-75a364174832\" (UID: \"7742abe4-cffe-4f79-b818-75a364174832\") " Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.061053 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffzs2\" (UniqueName: \"kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2\") pod \"c36e8400-c985-4fb1-aab9-8cced923f7ed\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.061295 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7742abe4-cffe-4f79-b818-75a364174832" (UID: "7742abe4-cffe-4f79-b818-75a364174832"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.062581 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c36e8400-c985-4fb1-aab9-8cced923f7ed" (UID: "c36e8400-c985-4fb1-aab9-8cced923f7ed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.062724 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts\") pod \"c36e8400-c985-4fb1-aab9-8cced923f7ed\" (UID: \"c36e8400-c985-4fb1-aab9-8cced923f7ed\") " Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.063348 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7742abe4-cffe-4f79-b818-75a364174832-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.063370 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c36e8400-c985-4fb1-aab9-8cced923f7ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.066387 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc" (OuterVolumeSpecName: "kube-api-access-qb6cc") pod "7742abe4-cffe-4f79-b818-75a364174832" (UID: "7742abe4-cffe-4f79-b818-75a364174832"). InnerVolumeSpecName "kube-api-access-qb6cc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.066606 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2" (OuterVolumeSpecName: "kube-api-access-ffzs2") pod "c36e8400-c985-4fb1-aab9-8cced923f7ed" (UID: "c36e8400-c985-4fb1-aab9-8cced923f7ed"). InnerVolumeSpecName "kube-api-access-ffzs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.164668 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb6cc\" (UniqueName: \"kubernetes.io/projected/7742abe4-cffe-4f79-b818-75a364174832-kube-api-access-qb6cc\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.164715 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffzs2\" (UniqueName: \"kubernetes.io/projected/c36e8400-c985-4fb1-aab9-8cced923f7ed-kube-api-access-ffzs2\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.541719 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7q792" event={"ID":"dab1f06c-c032-4e72-8d6b-313eb41c893e","Type":"ContainerStarted","Data":"a4898d7db1c1ab0d995ff0bade8718c92a4ed14360d6c8adef0afed06d89ace7"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.541872 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7q792" event={"ID":"dab1f06c-c032-4e72-8d6b-313eb41c893e","Type":"ContainerStarted","Data":"84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.542983 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cdz4d" event={"ID":"33f1fc15-7c2c-4fa1-94d8-704a644db8bb","Type":"ContainerStarted","Data":"31672bdc9b2a12f616293fa7b4333480edcb9675623c5573e6a892c73ff26326"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.545261 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-30f0-account-create-7k4s4" event={"ID":"f3d76cb1-a568-45e0-ac32-903f49ffbe54","Type":"ContainerStarted","Data":"75f7f2f6ce0d7c84fb222485fd7de19899838f066a478987f69f8229457eaa90"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.545319 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-30f0-account-create-7k4s4" event={"ID":"f3d76cb1-a568-45e0-ac32-903f49ffbe54","Type":"ContainerStarted","Data":"3479f0addb410520c2226a555587f198e12e7ee9a77e8240c760265dc733952c"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.549569 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-4d84-account-create-2z2fh" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.549590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4d84-account-create-2z2fh" event={"ID":"c36e8400-c985-4fb1-aab9-8cced923f7ed","Type":"ContainerDied","Data":"8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.550173 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ffce52569340ff69c47dfba89b7a315929e687ffdf862b47bbd2ff27c4f4264" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.551564 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e239-account-create-f8pc8" event={"ID":"5fc80e72-1910-4244-ba55-7046e4a9a5f1","Type":"ContainerStarted","Data":"01d98062e3558ed331fc75ea66b8e2865a9d6e32d2261a8a4e32c7c3cc6584de"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.551601 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e239-account-create-f8pc8" event={"ID":"5fc80e72-1910-4244-ba55-7046e4a9a5f1","Type":"ContainerStarted","Data":"09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.553030 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-vlf6s" event={"ID":"7742abe4-cffe-4f79-b818-75a364174832","Type":"ContainerDied","Data":"853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c"} Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.553062 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="853f5b374810a2d1d33f4a4be1f2ca1bf3863526aa552601d106e993fd1ea82c" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.553140 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-vlf6s" Nov 25 14:46:20 crc kubenswrapper[4879]: I1125 14:46:20.560065 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-cdz4d" podStartSLOduration=2.560045041 podStartE2EDuration="2.560045041s" podCreationTimestamp="2025-11-25 14:46:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:20.556077231 +0000 UTC m=+1272.159490302" watchObservedRunningTime="2025-11-25 14:46:20.560045041 +0000 UTC m=+1272.163458112" Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.570788 4879 generic.go:334] "Generic (PLEG): container finished" podID="f3d76cb1-a568-45e0-ac32-903f49ffbe54" containerID="75f7f2f6ce0d7c84fb222485fd7de19899838f066a478987f69f8229457eaa90" exitCode=0 Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.570883 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-30f0-account-create-7k4s4" event={"ID":"f3d76cb1-a568-45e0-ac32-903f49ffbe54","Type":"ContainerDied","Data":"75f7f2f6ce0d7c84fb222485fd7de19899838f066a478987f69f8229457eaa90"} Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.572013 4879 generic.go:334] "Generic (PLEG): container finished" podID="5fc80e72-1910-4244-ba55-7046e4a9a5f1" containerID="01d98062e3558ed331fc75ea66b8e2865a9d6e32d2261a8a4e32c7c3cc6584de" exitCode=0 Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.572068 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e239-account-create-f8pc8" event={"ID":"5fc80e72-1910-4244-ba55-7046e4a9a5f1","Type":"ContainerDied","Data":"01d98062e3558ed331fc75ea66b8e2865a9d6e32d2261a8a4e32c7c3cc6584de"} Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.573163 4879 generic.go:334] "Generic (PLEG): container finished" podID="dab1f06c-c032-4e72-8d6b-313eb41c893e" containerID="a4898d7db1c1ab0d995ff0bade8718c92a4ed14360d6c8adef0afed06d89ace7" exitCode=0 Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.573215 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7q792" event={"ID":"dab1f06c-c032-4e72-8d6b-313eb41c893e","Type":"ContainerDied","Data":"a4898d7db1c1ab0d995ff0bade8718c92a4ed14360d6c8adef0afed06d89ace7"} Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.574205 4879 generic.go:334] "Generic (PLEG): container finished" podID="33f1fc15-7c2c-4fa1-94d8-704a644db8bb" containerID="31672bdc9b2a12f616293fa7b4333480edcb9675623c5573e6a892c73ff26326" exitCode=0 Nov 25 14:46:21 crc kubenswrapper[4879]: I1125 14:46:21.574235 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cdz4d" event={"ID":"33f1fc15-7c2c-4fa1-94d8-704a644db8bb","Type":"ContainerDied","Data":"31672bdc9b2a12f616293fa7b4333480edcb9675623c5573e6a892c73ff26326"} Nov 25 14:46:22 crc kubenswrapper[4879]: I1125 14:46:22.582157 4879 generic.go:334] "Generic (PLEG): container finished" podID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerID="cf474471d6f23adf5b05a9b2ace8992e9dc0e24725d55440de35a374bae09a8a" exitCode=0 Nov 25 14:46:22 crc kubenswrapper[4879]: I1125 14:46:22.582251 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerDied","Data":"cf474471d6f23adf5b05a9b2ace8992e9dc0e24725d55440de35a374bae09a8a"} Nov 25 14:46:22 crc kubenswrapper[4879]: I1125 14:46:22.965304 4879 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-7q792" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.030811 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.042273 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.056362 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.116725 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-826bd\" (UniqueName: \"kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd\") pod \"dab1f06c-c032-4e72-8d6b-313eb41c893e\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.116866 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts\") pod \"dab1f06c-c032-4e72-8d6b-313eb41c893e\" (UID: \"dab1f06c-c032-4e72-8d6b-313eb41c893e\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.117514 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dab1f06c-c032-4e72-8d6b-313eb41c893e" (UID: "dab1f06c-c032-4e72-8d6b-313eb41c893e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.120905 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd" (OuterVolumeSpecName: "kube-api-access-826bd") pod "dab1f06c-c032-4e72-8d6b-313eb41c893e" (UID: "dab1f06c-c032-4e72-8d6b-313eb41c893e"). InnerVolumeSpecName "kube-api-access-826bd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218040 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcjlr\" (UniqueName: \"kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr\") pod \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218090 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts\") pod \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218175 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hgfb\" (UniqueName: \"kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb\") pod \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218210 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mq9j\" (UniqueName: \"kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j\") pod \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\" (UID: \"f3d76cb1-a568-45e0-ac32-903f49ffbe54\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218240 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts\") pod \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\" (UID: \"33f1fc15-7c2c-4fa1-94d8-704a644db8bb\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218316 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts\") pod \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\" (UID: \"5fc80e72-1910-4244-ba55-7046e4a9a5f1\") " Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218838 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5fc80e72-1910-4244-ba55-7046e4a9a5f1" (UID: "5fc80e72-1910-4244-ba55-7046e4a9a5f1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.218891 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33f1fc15-7c2c-4fa1-94d8-704a644db8bb" (UID: "33f1fc15-7c2c-4fa1-94d8-704a644db8bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.219169 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f3d76cb1-a568-45e0-ac32-903f49ffbe54" (UID: "f3d76cb1-a568-45e0-ac32-903f49ffbe54"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.219248 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.219268 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fc80e72-1910-4244-ba55-7046e4a9a5f1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.219280 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-826bd\" (UniqueName: \"kubernetes.io/projected/dab1f06c-c032-4e72-8d6b-313eb41c893e-kube-api-access-826bd\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.219293 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dab1f06c-c032-4e72-8d6b-313eb41c893e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.221423 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr" (OuterVolumeSpecName: "kube-api-access-fcjlr") pod "33f1fc15-7c2c-4fa1-94d8-704a644db8bb" (UID: "33f1fc15-7c2c-4fa1-94d8-704a644db8bb"). InnerVolumeSpecName "kube-api-access-fcjlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.221694 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j" (OuterVolumeSpecName: "kube-api-access-5mq9j") pod "f3d76cb1-a568-45e0-ac32-903f49ffbe54" (UID: "f3d76cb1-a568-45e0-ac32-903f49ffbe54"). InnerVolumeSpecName "kube-api-access-5mq9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.222518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb" (OuterVolumeSpecName: "kube-api-access-6hgfb") pod "5fc80e72-1910-4244-ba55-7046e4a9a5f1" (UID: "5fc80e72-1910-4244-ba55-7046e4a9a5f1"). InnerVolumeSpecName "kube-api-access-6hgfb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.321107 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcjlr\" (UniqueName: \"kubernetes.io/projected/33f1fc15-7c2c-4fa1-94d8-704a644db8bb-kube-api-access-fcjlr\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.321173 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3d76cb1-a568-45e0-ac32-903f49ffbe54-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.321187 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hgfb\" (UniqueName: \"kubernetes.io/projected/5fc80e72-1910-4244-ba55-7046e4a9a5f1-kube-api-access-6hgfb\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.321201 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mq9j\" (UniqueName: \"kubernetes.io/projected/f3d76cb1-a568-45e0-ac32-903f49ffbe54-kube-api-access-5mq9j\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.590634 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-30f0-account-create-7k4s4" event={"ID":"f3d76cb1-a568-45e0-ac32-903f49ffbe54","Type":"ContainerDied","Data":"3479f0addb410520c2226a555587f198e12e7ee9a77e8240c760265dc733952c"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.590681 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3479f0addb410520c2226a555587f198e12e7ee9a77e8240c760265dc733952c" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.590647 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-30f0-account-create-7k4s4" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.593474 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e239-account-create-f8pc8" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.593596 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e239-account-create-f8pc8" event={"ID":"5fc80e72-1910-4244-ba55-7046e4a9a5f1","Type":"ContainerDied","Data":"09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.593959 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09aad270bb8ad4767c00149c2b52f1b538fa60b899f9e2b8ff6e613a8f57a32f" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.595316 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-7q792" event={"ID":"dab1f06c-c032-4e72-8d6b-313eb41c893e","Type":"ContainerDied","Data":"84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.595721 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84aea03b37c60ce15fc7f6dd99f2f7f55879ce7b1566de1a5148980f40045a42" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.595346 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-7q792" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.596870 4879 generic.go:334] "Generic (PLEG): container finished" podID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerID="a476d9d71c0fa760bf1e708c942cc87f51d050d16da32ce40f524f553587e4f7" exitCode=0 Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.596952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerDied","Data":"a476d9d71c0fa760bf1e708c942cc87f51d050d16da32ce40f524f553587e4f7"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.606432 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cdz4d" event={"ID":"33f1fc15-7c2c-4fa1-94d8-704a644db8bb","Type":"ContainerDied","Data":"f97f2bda9b981050d7ff5b844ccd24b939a90fc3da7ee300c743c50e63f85580"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.606474 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f97f2bda9b981050d7ff5b844ccd24b939a90fc3da7ee300c743c50e63f85580" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.606558 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cdz4d" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.614630 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerStarted","Data":"95617f394d3d7080ab4131a8c7c59dc4d2711ba5f40712fcaa511fe4dd6ad911"} Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.617196 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.825040 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.836472 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.840059 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-gml5w" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" probeResult="failure" output=< Nov 25 14:46:23 crc kubenswrapper[4879]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 25 14:46:23 crc kubenswrapper[4879]: > Nov 25 14:46:23 crc kubenswrapper[4879]: I1125 14:46:23.853170 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.48689479 podStartE2EDuration="1m19.85315234s" podCreationTimestamp="2025-11-25 14:45:04 +0000 UTC" firstStartedPulling="2025-11-25 14:45:06.96327277 +0000 UTC m=+1198.566685841" lastFinishedPulling="2025-11-25 14:45:47.32953033 +0000 UTC m=+1238.932943391" observedRunningTime="2025-11-25 14:46:23.690708887 +0000 UTC m=+1275.294121958" watchObservedRunningTime="2025-11-25 14:46:23.85315234 +0000 UTC m=+1275.456565411" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064152 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gml5w-config-hh2tl"] Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064552 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c36e8400-c985-4fb1-aab9-8cced923f7ed" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064570 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36e8400-c985-4fb1-aab9-8cced923f7ed" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064583 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3d76cb1-a568-45e0-ac32-903f49ffbe54" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064590 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3d76cb1-a568-45e0-ac32-903f49ffbe54" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064601 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab1f06c-c032-4e72-8d6b-313eb41c893e" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064607 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab1f06c-c032-4e72-8d6b-313eb41c893e" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064615 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc80e72-1910-4244-ba55-7046e4a9a5f1" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064621 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc80e72-1910-4244-ba55-7046e4a9a5f1" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064639 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7742abe4-cffe-4f79-b818-75a364174832" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064646 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7742abe4-cffe-4f79-b818-75a364174832" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: E1125 14:46:24.064667 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33f1fc15-7c2c-4fa1-94d8-704a644db8bb" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064676 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f1fc15-7c2c-4fa1-94d8-704a644db8bb" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064848 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7742abe4-cffe-4f79-b818-75a364174832" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064885 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc80e72-1910-4244-ba55-7046e4a9a5f1" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064906 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dab1f06c-c032-4e72-8d6b-313eb41c893e" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064923 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36e8400-c985-4fb1-aab9-8cced923f7ed" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064944 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3d76cb1-a568-45e0-ac32-903f49ffbe54" containerName="mariadb-account-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.064955 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="33f1fc15-7c2c-4fa1-94d8-704a644db8bb" containerName="mariadb-database-create" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.065648 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.070607 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.099144 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w-config-hh2tl"] Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.133433 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x79d\" (UniqueName: \"kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.133473 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.133661 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.133749 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.134026 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.134115 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.235872 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 
14:46:24.235955 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x79d\" (UniqueName: \"kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.235982 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.236326 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.238045 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.239146 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.239473 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.239548 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.240083 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.240271 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.240369 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.272899 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x79d\" (UniqueName: \"kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d\") pod \"ovn-controller-gml5w-config-hh2tl\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.381308 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.471146 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-7s5gb"] Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.472397 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.479728 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-qssh2" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.479909 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.494834 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-7s5gb"] Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.549475 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.549530 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.549607 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.549844 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4xcz\" (UniqueName: \"kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.642689 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerStarted","Data":"9d10b0eee6c5fd073f93c286eefe0e8c01139b9655565d8692ef2eddad0fce94"} Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.646877 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.651269 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.651330 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.651398 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.651425 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4xcz\" (UniqueName: \"kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.659038 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.660996 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.673780 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4xcz\" (UniqueName: \"kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.676664 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle\") pod \"glance-db-sync-7s5gb\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.684516 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=39.927643018 podStartE2EDuration="1m20.684491586s" 
podCreationTimestamp="2025-11-25 14:45:04 +0000 UTC" firstStartedPulling="2025-11-25 14:45:06.437738162 +0000 UTC m=+1198.041151243" lastFinishedPulling="2025-11-25 14:45:47.19458674 +0000 UTC m=+1238.797999811" observedRunningTime="2025-11-25 14:46:24.677814391 +0000 UTC m=+1276.281227462" watchObservedRunningTime="2025-11-25 14:46:24.684491586 +0000 UTC m=+1276.287904657" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.854198 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-7s5gb" Nov 25 14:46:24 crc kubenswrapper[4879]: I1125 14:46:24.929643 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w-config-hh2tl"] Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.408980 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-7s5gb"] Nov 25 14:46:25 crc kubenswrapper[4879]: W1125 14:46:25.421351 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb010f021_2ab3_424f_910a_68f969e93561.slice/crio-411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474 WatchSource:0}: Error finding container 411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474: Status 404 returned error can't find the container with id 411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474 Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.655641 4879 generic.go:334] "Generic (PLEG): container finished" podID="768aa88a-f137-49c0-9a91-59a8c7390018" containerID="fc769144da28d1f800fcf592ed4bddb3bf523a034439a16fc93e6ad5fd5a7fb5" exitCode=0 Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.659955 4879 generic.go:334] "Generic (PLEG): container finished" podID="480a7b19-c9e0-41c2-b4cd-af083572f083" containerID="66d1cb297f19e2391f50e7a7f3d03631d216851e0f2b568c9abf77918731b2c4" exitCode=0 Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.660886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-hh2tl" event={"ID":"768aa88a-f137-49c0-9a91-59a8c7390018","Type":"ContainerDied","Data":"fc769144da28d1f800fcf592ed4bddb3bf523a034439a16fc93e6ad5fd5a7fb5"} Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.660935 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-hh2tl" event={"ID":"768aa88a-f137-49c0-9a91-59a8c7390018","Type":"ContainerStarted","Data":"b528e3349c0b3b524bbcd268b21dd873428d10f61b6b301c56d41ce46cd28d1c"} Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.660949 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-cjhrw" event={"ID":"480a7b19-c9e0-41c2-b4cd-af083572f083","Type":"ContainerDied","Data":"66d1cb297f19e2391f50e7a7f3d03631d216851e0f2b568c9abf77918731b2c4"} Nov 25 14:46:25 crc kubenswrapper[4879]: I1125 14:46:25.662597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7s5gb" event={"ID":"b010f021-2ab3-424f-910a-68f969e93561","Type":"ContainerStarted","Data":"411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474"} Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.127741 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.133950 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.204048 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.204311 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.204341 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb6zr\" (UniqueName: \"kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.204362 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205409 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205486 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205538 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205559 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205595 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf\") pod \"480a7b19-c9e0-41c2-b4cd-af083572f083\" (UID: \"480a7b19-c9e0-41c2-b4cd-af083572f083\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205621 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: 
\"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205642 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205647 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts" (OuterVolumeSpecName: "scripts") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205707 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.205736 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x79d\" (UniqueName: \"kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d\") pod \"768aa88a-f137-49c0-9a91-59a8c7390018\" (UID: \"768aa88a-f137-49c0-9a91-59a8c7390018\") " Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206046 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206084 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206297 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206392 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run" (OuterVolumeSpecName: "var-run") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206388 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206844 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.206953 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.210634 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d" (OuterVolumeSpecName: "kube-api-access-4x79d") pod "768aa88a-f137-49c0-9a91-59a8c7390018" (UID: "768aa88a-f137-49c0-9a91-59a8c7390018"). InnerVolumeSpecName "kube-api-access-4x79d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.212598 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.218896 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr" (OuterVolumeSpecName: "kube-api-access-xb6zr") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "kube-api-access-xb6zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.228735 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts" (OuterVolumeSpecName: "scripts") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.230088 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.239272 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "480a7b19-c9e0-41c2-b4cd-af083572f083" (UID: "480a7b19-c9e0-41c2-b4cd-af083572f083"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314111 4879 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314175 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb6zr\" (UniqueName: \"kubernetes.io/projected/480a7b19-c9e0-41c2-b4cd-af083572f083-kube-api-access-xb6zr\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314216 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314231 4879 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314243 4879 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314254 4879 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/480a7b19-c9e0-41c2-b4cd-af083572f083-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.314988 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/480a7b19-c9e0-41c2-b4cd-af083572f083-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.315015 4879 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/480a7b19-c9e0-41c2-b4cd-af083572f083-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.315027 4879 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.315040 4879 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/768aa88a-f137-49c0-9a91-59a8c7390018-var-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.315052 4879 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/768aa88a-f137-49c0-9a91-59a8c7390018-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.315063 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x79d\" (UniqueName: \"kubernetes.io/projected/768aa88a-f137-49c0-9a91-59a8c7390018-kube-api-access-4x79d\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.680324 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-hh2tl" 
event={"ID":"768aa88a-f137-49c0-9a91-59a8c7390018","Type":"ContainerDied","Data":"b528e3349c0b3b524bbcd268b21dd873428d10f61b6b301c56d41ce46cd28d1c"} Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.680603 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b528e3349c0b3b524bbcd268b21dd873428d10f61b6b301c56d41ce46cd28d1c" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.680652 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w-config-hh2tl" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.683589 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-cjhrw" event={"ID":"480a7b19-c9e0-41c2-b4cd-af083572f083","Type":"ContainerDied","Data":"07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0"} Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.683633 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07b7b4c8ed79391439cfbf95568de07b9dd1a263681df2fc3505b58f150f41b0" Nov 25 14:46:27 crc kubenswrapper[4879]: I1125 14:46:27.683693 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-cjhrw" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.253650 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gml5w-config-hh2tl"] Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.260436 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gml5w-config-hh2tl"] Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.393546 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-gml5w-config-w7zzb"] Nov 25 14:46:28 crc kubenswrapper[4879]: E1125 14:46:28.398558 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480a7b19-c9e0-41c2-b4cd-af083572f083" containerName="swift-ring-rebalance" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.398581 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="480a7b19-c9e0-41c2-b4cd-af083572f083" containerName="swift-ring-rebalance" Nov 25 14:46:28 crc kubenswrapper[4879]: E1125 14:46:28.398604 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768aa88a-f137-49c0-9a91-59a8c7390018" containerName="ovn-config" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.398610 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="768aa88a-f137-49c0-9a91-59a8c7390018" containerName="ovn-config" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.398858 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="480a7b19-c9e0-41c2-b4cd-af083572f083" containerName="swift-ring-rebalance" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.398892 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="768aa88a-f137-49c0-9a91-59a8c7390018" containerName="ovn-config" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.399521 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.402036 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.417410 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w-config-w7zzb"] Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.533707 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd6v2\" (UniqueName: \"kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.533830 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.533866 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.533948 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.534114 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.534192 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635634 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635700 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn\") pod 
\"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635765 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd6v2\" (UniqueName: \"kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635790 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635810 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.635849 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.636002 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.636039 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.636261 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.636754 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.638044 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts\") pod 
\"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.659087 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd6v2\" (UniqueName: \"kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2\") pod \"ovn-controller-gml5w-config-w7zzb\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.731913 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-gml5w" Nov 25 14:46:28 crc kubenswrapper[4879]: I1125 14:46:28.735252 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:29 crc kubenswrapper[4879]: I1125 14:46:29.063250 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-gml5w-config-w7zzb"] Nov 25 14:46:29 crc kubenswrapper[4879]: I1125 14:46:29.659209 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="768aa88a-f137-49c0-9a91-59a8c7390018" path="/var/lib/kubelet/pods/768aa88a-f137-49c0-9a91-59a8c7390018/volumes" Nov 25 14:46:29 crc kubenswrapper[4879]: I1125 14:46:29.704790 4879 generic.go:334] "Generic (PLEG): container finished" podID="4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" containerID="c0a7c3847a43fe9b918c2eab99e478939ca48e5538407ab69dfa631a6c8584a3" exitCode=0 Nov 25 14:46:29 crc kubenswrapper[4879]: I1125 14:46:29.704859 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-w7zzb" event={"ID":"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661","Type":"ContainerDied","Data":"c0a7c3847a43fe9b918c2eab99e478939ca48e5538407ab69dfa631a6c8584a3"} Nov 25 14:46:29 crc kubenswrapper[4879]: I1125 14:46:29.704909 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-w7zzb" event={"ID":"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661","Type":"ContainerStarted","Data":"6a9a55b33fef542519ab3edba8dd45e3ea6ee0447ea976e49dc54900b61b6f07"} Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.011747 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.084476 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085111 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085230 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085262 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd6v2\" (UniqueName: \"kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085298 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085332 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run\") pod \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\" (UID: \"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661\") " Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.084609 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085509 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085586 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run" (OuterVolumeSpecName: "var-run") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085990 4879 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.085997 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.086012 4879 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.086025 4879 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.086304 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts" (OuterVolumeSpecName: "scripts") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.102621 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2" (OuterVolumeSpecName: "kube-api-access-fd6v2") pod "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" (UID: "4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661"). InnerVolumeSpecName "kube-api-access-fd6v2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.188256 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.188302 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd6v2\" (UniqueName: \"kubernetes.io/projected/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-kube-api-access-fd6v2\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.188318 4879 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.721486 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w-config-w7zzb" event={"ID":"4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661","Type":"ContainerDied","Data":"6a9a55b33fef542519ab3edba8dd45e3ea6ee0447ea976e49dc54900b61b6f07"} Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.721524 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a9a55b33fef542519ab3edba8dd45e3ea6ee0447ea976e49dc54900b61b6f07" Nov 25 14:46:31 crc kubenswrapper[4879]: I1125 14:46:31.721524 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w-config-w7zzb" Nov 25 14:46:32 crc kubenswrapper[4879]: I1125 14:46:32.099159 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gml5w-config-w7zzb"] Nov 25 14:46:32 crc kubenswrapper[4879]: I1125 14:46:32.111097 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gml5w-config-w7zzb"] Nov 25 14:46:33 crc kubenswrapper[4879]: I1125 14:46:33.655162 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" path="/var/lib/kubelet/pods/4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661/volumes" Nov 25 14:46:34 crc kubenswrapper[4879]: I1125 14:46:34.137057 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:34 crc kubenswrapper[4879]: I1125 14:46:34.151929 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"swift-storage-0\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " pod="openstack/swift-storage-0" Nov 25 14:46:34 crc kubenswrapper[4879]: I1125 14:46:34.312099 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 25 14:46:35 crc kubenswrapper[4879]: I1125 14:46:35.866344 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.273878 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lrtc8"] Nov 25 14:46:36 crc kubenswrapper[4879]: E1125 14:46:36.274366 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" containerName="ovn-config" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.274394 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" containerName="ovn-config" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.274653 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c66b0eb-5a6f-4030-b2ba-f87cc7bd1661" containerName="ovn-config" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.275566 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.298976 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lrtc8"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.329385 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-97a3-account-create-l8gnv"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.330470 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.339743 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.378246 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.378298 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfczk\" (UniqueName: \"kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.410258 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-hnsgr"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.411287 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.419113 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-hnsgr"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.461540 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-97a3-account-create-l8gnv"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.469331 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481073 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7bh6\" (UniqueName: \"kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481181 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zgm9\" (UniqueName: \"kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481219 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481236 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481273 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.481291 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfczk\" (UniqueName: \"kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.482269 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.525068 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lfczk\" (UniqueName: \"kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk\") pod \"cinder-db-create-lrtc8\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.586175 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.586230 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.586375 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7bh6\" (UniqueName: \"kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.586519 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zgm9\" (UniqueName: \"kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.588235 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.589163 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.602595 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.618388 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-6256-account-create-kl4pl"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.619701 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.623593 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.634596 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-72m5s"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.635702 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.649341 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.649560 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.649779 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mzjv2" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.655292 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.657086 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7bh6\" (UniqueName: \"kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6\") pod \"barbican-db-create-hnsgr\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.664041 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-72m5s"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.668174 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zgm9\" (UniqueName: \"kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9\") pod \"barbican-97a3-account-create-l8gnv\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.689410 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.689521 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8znh\" (UniqueName: \"kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.689557 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txmht\" (UniqueName: \"kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.689619 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.689666 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.690902 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6256-account-create-kl4pl"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.716884 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-49whm"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.718831 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.734626 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.739595 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-49whm"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.790073 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b847-account-create-kqzp9"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791401 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791849 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791890 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791932 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjmqf\" (UniqueName: \"kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791960 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.791997 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.792051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8znh\" (UniqueName: \"kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.792076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txmht\" (UniqueName: \"kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.793540 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.795557 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.796110 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.801478 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.813835 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b847-account-create-kqzp9"] Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.819696 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txmht\" (UniqueName: \"kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht\") pod \"keystone-db-sync-72m5s\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.833434 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8znh\" (UniqueName: \"kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh\") pod \"cinder-6256-account-create-kl4pl\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.893325 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.893408 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjmqf\" (UniqueName: \"kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.893446 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.893483 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szd7q\" (UniqueName: \"kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.894346 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " 
pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.910432 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjmqf\" (UniqueName: \"kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf\") pod \"neutron-db-create-49whm\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " pod="openstack/neutron-db-create-49whm" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.958619 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.973184 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.994765 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szd7q\" (UniqueName: \"kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.994935 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:36 crc kubenswrapper[4879]: I1125 14:46:36.995703 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:37 crc kubenswrapper[4879]: I1125 14:46:37.012387 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szd7q\" (UniqueName: \"kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q\") pod \"neutron-b847-account-create-kqzp9\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:37 crc kubenswrapper[4879]: I1125 14:46:37.055737 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-72m5s" Nov 25 14:46:37 crc kubenswrapper[4879]: I1125 14:46:37.076426 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-49whm" Nov 25 14:46:37 crc kubenswrapper[4879]: I1125 14:46:37.190686 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.553930 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b847-account-create-kqzp9"] Nov 25 14:46:42 crc kubenswrapper[4879]: W1125 14:46:42.557496 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45f445eb_09f2_4dcc_b4a1_2728239cd955.slice/crio-0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c WatchSource:0}: Error finding container 0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c: Status 404 returned error can't find the container with id 0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c Nov 25 14:46:42 crc kubenswrapper[4879]: W1125 14:46:42.560277 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod734e5d8b_907c_4246_adca_6a05a98c0b27.slice/crio-fa4f882c8414a9031e52ed9ed8c1269005df3446fe4a953c62ec3f4473c2d804 WatchSource:0}: Error finding container fa4f882c8414a9031e52ed9ed8c1269005df3446fe4a953c62ec3f4473c2d804: Status 404 returned error can't find the container with id fa4f882c8414a9031e52ed9ed8c1269005df3446fe4a953c62ec3f4473c2d804 Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.567687 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.608196 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-49whm"] Nov 25 14:46:42 crc kubenswrapper[4879]: W1125 14:46:42.611467 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40700243_955b_472a_8d05_4a3284152528.slice/crio-4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e WatchSource:0}: Error finding container 4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e: Status 404 returned error can't find the container with id 4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.618776 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6256-account-create-kl4pl"] Nov 25 14:46:42 crc kubenswrapper[4879]: W1125 14:46:42.619846 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97317c8e_3668_45a4_84d5_a91b4a586e81.slice/crio-6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc WatchSource:0}: Error finding container 6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc: Status 404 returned error can't find the container with id 6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.629962 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lrtc8"] Nov 25 14:46:42 crc kubenswrapper[4879]: E1125 14:46:42.673940 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Nov 25 14:46:42 crc kubenswrapper[4879]: E1125 14:46:42.674272 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s4xcz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-7s5gb_openstack(b010f021-2ab3-424f-910a-68f969e93561): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:46:42 crc kubenswrapper[4879]: E1125 14:46:42.677560 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-7s5gb" podUID="b010f021-2ab3-424f-910a-68f969e93561" Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.743167 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-hnsgr"] Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.750244 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-97a3-account-create-l8gnv"] Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.756473 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-72m5s"] Nov 25 14:46:42 crc kubenswrapper[4879]: W1125 14:46:42.762453 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod028ab6a4_5cba_48ec_af0f_6cc019d46e60.slice/crio-e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43 WatchSource:0}: Error finding container e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43: Status 404 returned error can't find the container with id 
e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43 Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.889221 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lrtc8" event={"ID":"97317c8e-3668-45a4-84d5-a91b4a586e81","Type":"ContainerStarted","Data":"6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.890683 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-97a3-account-create-l8gnv" event={"ID":"032be742-1ad2-493b-aeda-974292114a3c","Type":"ContainerStarted","Data":"11862943b9694211454e8f5a3abd72b0d9c2358cabac69742b943740a4dc5d70"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.891650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hnsgr" event={"ID":"7827b848-c65a-4cab-8768-a435040aed03","Type":"ContainerStarted","Data":"68ec0644c4d9860a0a7e21e1e91790d5b979959b8a14bdec7d8bdfb699645639"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.893000 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-72m5s" event={"ID":"028ab6a4-5cba-48ec-af0f-6cc019d46e60","Type":"ContainerStarted","Data":"e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.894110 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-49whm" event={"ID":"40700243-955b-472a-8d05-4a3284152528","Type":"ContainerStarted","Data":"4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.894904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6256-account-create-kl4pl" event={"ID":"d3812c9b-a0f4-4c22-a569-f99b2e1ab610","Type":"ContainerStarted","Data":"98389c5ea4ae431974e1304778ef80581c4632e8fc3fdc0e6ae289903af260fe"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.895660 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b847-account-create-kqzp9" event={"ID":"45f445eb-09f2-4dcc-b4a1-2728239cd955","Type":"ContainerStarted","Data":"0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c"} Nov 25 14:46:42 crc kubenswrapper[4879]: I1125 14:46:42.896716 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"fa4f882c8414a9031e52ed9ed8c1269005df3446fe4a953c62ec3f4473c2d804"} Nov 25 14:46:42 crc kubenswrapper[4879]: E1125 14:46:42.898190 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-7s5gb" podUID="b010f021-2ab3-424f-910a-68f969e93561" Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.916670 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6256-account-create-kl4pl" event={"ID":"d3812c9b-a0f4-4c22-a569-f99b2e1ab610","Type":"ContainerStarted","Data":"debe7798b9436c2fd4e85e9d3be0ad032cd6fa97b197b874840d667896089e8a"} Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.919471 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b847-account-create-kqzp9" 
event={"ID":"45f445eb-09f2-4dcc-b4a1-2728239cd955","Type":"ContainerStarted","Data":"55b2d5a565b513c07e018ca250204a029bcc3a613837014a192a04cf01b5b836"} Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.921589 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lrtc8" event={"ID":"97317c8e-3668-45a4-84d5-a91b4a586e81","Type":"ContainerStarted","Data":"edf8e81a61cd702c70a2c3934e932de736641ae6fa5c76a23b60ee36e54d3b5f"} Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.923872 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-97a3-account-create-l8gnv" event={"ID":"032be742-1ad2-493b-aeda-974292114a3c","Type":"ContainerStarted","Data":"12598c3acdb3699a42bbab7f4f6aec79e1cae59bae5358e126e6804e1b221dff"} Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.925072 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hnsgr" event={"ID":"7827b848-c65a-4cab-8768-a435040aed03","Type":"ContainerStarted","Data":"8f250436d4a2c6eef379ef9363ed87b3aad3f00f51ff3d000d658c53000309df"} Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.940637 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-6256-account-create-kl4pl" podStartSLOduration=8.940612625 podStartE2EDuration="8.940612625s" podCreationTimestamp="2025-11-25 14:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:44.933000785 +0000 UTC m=+1296.536413876" watchObservedRunningTime="2025-11-25 14:46:44.940612625 +0000 UTC m=+1296.544025696" Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.949604 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-lrtc8" podStartSLOduration=8.949587231 podStartE2EDuration="8.949587231s" podCreationTimestamp="2025-11-25 14:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:44.947690699 +0000 UTC m=+1296.551103770" watchObservedRunningTime="2025-11-25 14:46:44.949587231 +0000 UTC m=+1296.553000302" Nov 25 14:46:44 crc kubenswrapper[4879]: I1125 14:46:44.966146 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-b847-account-create-kqzp9" podStartSLOduration=8.966106785000001 podStartE2EDuration="8.966106785s" podCreationTimestamp="2025-11-25 14:46:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:46:44.962039163 +0000 UTC m=+1296.565452254" watchObservedRunningTime="2025-11-25 14:46:44.966106785 +0000 UTC m=+1296.569519856" Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.938423 4879 generic.go:334] "Generic (PLEG): container finished" podID="032be742-1ad2-493b-aeda-974292114a3c" containerID="12598c3acdb3699a42bbab7f4f6aec79e1cae59bae5358e126e6804e1b221dff" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.938518 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-97a3-account-create-l8gnv" event={"ID":"032be742-1ad2-493b-aeda-974292114a3c","Type":"ContainerDied","Data":"12598c3acdb3699a42bbab7f4f6aec79e1cae59bae5358e126e6804e1b221dff"} Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.940985 4879 generic.go:334] "Generic (PLEG): container finished" podID="7827b848-c65a-4cab-8768-a435040aed03" 
containerID="8f250436d4a2c6eef379ef9363ed87b3aad3f00f51ff3d000d658c53000309df" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.941030 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hnsgr" event={"ID":"7827b848-c65a-4cab-8768-a435040aed03","Type":"ContainerDied","Data":"8f250436d4a2c6eef379ef9363ed87b3aad3f00f51ff3d000d658c53000309df"} Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.942783 4879 generic.go:334] "Generic (PLEG): container finished" podID="40700243-955b-472a-8d05-4a3284152528" containerID="0aa845f6803c9eb24a06233769b5289019ad7726632f483f0ee6f162d8982f30" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.942894 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-49whm" event={"ID":"40700243-955b-472a-8d05-4a3284152528","Type":"ContainerDied","Data":"0aa845f6803c9eb24a06233769b5289019ad7726632f483f0ee6f162d8982f30"} Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.944596 4879 generic.go:334] "Generic (PLEG): container finished" podID="d3812c9b-a0f4-4c22-a569-f99b2e1ab610" containerID="debe7798b9436c2fd4e85e9d3be0ad032cd6fa97b197b874840d667896089e8a" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.944653 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6256-account-create-kl4pl" event={"ID":"d3812c9b-a0f4-4c22-a569-f99b2e1ab610","Type":"ContainerDied","Data":"debe7798b9436c2fd4e85e9d3be0ad032cd6fa97b197b874840d667896089e8a"} Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.945992 4879 generic.go:334] "Generic (PLEG): container finished" podID="45f445eb-09f2-4dcc-b4a1-2728239cd955" containerID="55b2d5a565b513c07e018ca250204a029bcc3a613837014a192a04cf01b5b836" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.946082 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b847-account-create-kqzp9" event={"ID":"45f445eb-09f2-4dcc-b4a1-2728239cd955","Type":"ContainerDied","Data":"55b2d5a565b513c07e018ca250204a029bcc3a613837014a192a04cf01b5b836"} Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.948412 4879 generic.go:334] "Generic (PLEG): container finished" podID="97317c8e-3668-45a4-84d5-a91b4a586e81" containerID="edf8e81a61cd702c70a2c3934e932de736641ae6fa5c76a23b60ee36e54d3b5f" exitCode=0 Nov 25 14:46:45 crc kubenswrapper[4879]: I1125 14:46:45.948464 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lrtc8" event={"ID":"97317c8e-3668-45a4-84d5-a91b4a586e81","Type":"ContainerDied","Data":"edf8e81a61cd702c70a2c3934e932de736641ae6fa5c76a23b60ee36e54d3b5f"} Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.307911 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.323039 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szd7q\" (UniqueName: \"kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q\") pod \"45f445eb-09f2-4dcc-b4a1-2728239cd955\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.323509 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts\") pod \"45f445eb-09f2-4dcc-b4a1-2728239cd955\" (UID: \"45f445eb-09f2-4dcc-b4a1-2728239cd955\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.326775 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45f445eb-09f2-4dcc-b4a1-2728239cd955" (UID: "45f445eb-09f2-4dcc-b4a1-2728239cd955"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.330766 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q" (OuterVolumeSpecName: "kube-api-access-szd7q") pod "45f445eb-09f2-4dcc-b4a1-2728239cd955" (UID: "45f445eb-09f2-4dcc-b4a1-2728239cd955"). InnerVolumeSpecName "kube-api-access-szd7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.426281 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szd7q\" (UniqueName: \"kubernetes.io/projected/45f445eb-09f2-4dcc-b4a1-2728239cd955-kube-api-access-szd7q\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.426338 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45f445eb-09f2-4dcc-b4a1-2728239cd955-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.497611 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.527186 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zgm9\" (UniqueName: \"kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9\") pod \"032be742-1ad2-493b-aeda-974292114a3c\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.527494 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts\") pod \"032be742-1ad2-493b-aeda-974292114a3c\" (UID: \"032be742-1ad2-493b-aeda-974292114a3c\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.528088 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "032be742-1ad2-493b-aeda-974292114a3c" (UID: "032be742-1ad2-493b-aeda-974292114a3c"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.531843 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9" (OuterVolumeSpecName: "kube-api-access-8zgm9") pod "032be742-1ad2-493b-aeda-974292114a3c" (UID: "032be742-1ad2-493b-aeda-974292114a3c"). InnerVolumeSpecName "kube-api-access-8zgm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.539948 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.564752 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.573350 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.585036 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-49whm" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628595 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7bh6\" (UniqueName: \"kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6\") pod \"7827b848-c65a-4cab-8768-a435040aed03\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628654 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts\") pod \"40700243-955b-472a-8d05-4a3284152528\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628694 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts\") pod \"97317c8e-3668-45a4-84d5-a91b4a586e81\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628759 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfczk\" (UniqueName: \"kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk\") pod \"97317c8e-3668-45a4-84d5-a91b4a586e81\" (UID: \"97317c8e-3668-45a4-84d5-a91b4a586e81\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628829 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts\") pod \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628935 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8znh\" (UniqueName: \"kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh\") pod \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\" (UID: \"d3812c9b-a0f4-4c22-a569-f99b2e1ab610\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.628970 4879 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjmqf\" (UniqueName: \"kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf\") pod \"40700243-955b-472a-8d05-4a3284152528\" (UID: \"40700243-955b-472a-8d05-4a3284152528\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629021 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts\") pod \"7827b848-c65a-4cab-8768-a435040aed03\" (UID: \"7827b848-c65a-4cab-8768-a435040aed03\") " Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629299 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "40700243-955b-472a-8d05-4a3284152528" (UID: "40700243-955b-472a-8d05-4a3284152528"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629367 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "97317c8e-3668-45a4-84d5-a91b4a586e81" (UID: "97317c8e-3668-45a4-84d5-a91b4a586e81"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629662 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zgm9\" (UniqueName: \"kubernetes.io/projected/032be742-1ad2-493b-aeda-974292114a3c-kube-api-access-8zgm9\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629677 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40700243-955b-472a-8d05-4a3284152528-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629686 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97317c8e-3668-45a4-84d5-a91b4a586e81-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629704 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7827b848-c65a-4cab-8768-a435040aed03" (UID: "7827b848-c65a-4cab-8768-a435040aed03"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.629789 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/032be742-1ad2-493b-aeda-974292114a3c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.630330 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d3812c9b-a0f4-4c22-a569-f99b2e1ab610" (UID: "d3812c9b-a0f4-4c22-a569-f99b2e1ab610"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.632578 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk" (OuterVolumeSpecName: "kube-api-access-lfczk") pod "97317c8e-3668-45a4-84d5-a91b4a586e81" (UID: "97317c8e-3668-45a4-84d5-a91b4a586e81"). InnerVolumeSpecName "kube-api-access-lfczk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.633600 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6" (OuterVolumeSpecName: "kube-api-access-k7bh6") pod "7827b848-c65a-4cab-8768-a435040aed03" (UID: "7827b848-c65a-4cab-8768-a435040aed03"). InnerVolumeSpecName "kube-api-access-k7bh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.634492 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf" (OuterVolumeSpecName: "kube-api-access-kjmqf") pod "40700243-955b-472a-8d05-4a3284152528" (UID: "40700243-955b-472a-8d05-4a3284152528"). InnerVolumeSpecName "kube-api-access-kjmqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.634546 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh" (OuterVolumeSpecName: "kube-api-access-g8znh") pod "d3812c9b-a0f4-4c22-a569-f99b2e1ab610" (UID: "d3812c9b-a0f4-4c22-a569-f99b2e1ab610"). InnerVolumeSpecName "kube-api-access-g8znh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731477 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfczk\" (UniqueName: \"kubernetes.io/projected/97317c8e-3668-45a4-84d5-a91b4a586e81-kube-api-access-lfczk\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731511 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731520 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8znh\" (UniqueName: \"kubernetes.io/projected/d3812c9b-a0f4-4c22-a569-f99b2e1ab610-kube-api-access-g8znh\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731528 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjmqf\" (UniqueName: \"kubernetes.io/projected/40700243-955b-472a-8d05-4a3284152528-kube-api-access-kjmqf\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731537 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7827b848-c65a-4cab-8768-a435040aed03-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.731545 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7bh6\" (UniqueName: \"kubernetes.io/projected/7827b848-c65a-4cab-8768-a435040aed03-kube-api-access-k7bh6\") on node \"crc\" DevicePath \"\"" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.990058 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-97a3-account-create-l8gnv" event={"ID":"032be742-1ad2-493b-aeda-974292114a3c","Type":"ContainerDied","Data":"11862943b9694211454e8f5a3abd72b0d9c2358cabac69742b943740a4dc5d70"} Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.990111 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11862943b9694211454e8f5a3abd72b0d9c2358cabac69742b943740a4dc5d70" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.990197 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-97a3-account-create-l8gnv" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.996446 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-hnsgr" event={"ID":"7827b848-c65a-4cab-8768-a435040aed03","Type":"ContainerDied","Data":"68ec0644c4d9860a0a7e21e1e91790d5b979959b8a14bdec7d8bdfb699645639"} Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.996488 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68ec0644c4d9860a0a7e21e1e91790d5b979959b8a14bdec7d8bdfb699645639" Nov 25 14:46:49 crc kubenswrapper[4879]: I1125 14:46:49.996546 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-hnsgr" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.001736 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-72m5s" event={"ID":"028ab6a4-5cba-48ec-af0f-6cc019d46e60","Type":"ContainerStarted","Data":"16a1925a0bab70bc1be829bb908cb1c888c4edac0592caffaf1453dc85a92d61"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.005355 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-49whm" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.005395 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-49whm" event={"ID":"40700243-955b-472a-8d05-4a3284152528","Type":"ContainerDied","Data":"4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.005435 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4af747eeaaf1d4b10c720e498b25306527b684a6a42a558652212402e031a43e" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.007839 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6256-account-create-kl4pl" event={"ID":"d3812c9b-a0f4-4c22-a569-f99b2e1ab610","Type":"ContainerDied","Data":"98389c5ea4ae431974e1304778ef80581c4632e8fc3fdc0e6ae289903af260fe"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.007880 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98389c5ea4ae431974e1304778ef80581c4632e8fc3fdc0e6ae289903af260fe" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.007852 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6256-account-create-kl4pl" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.010634 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b847-account-create-kqzp9" event={"ID":"45f445eb-09f2-4dcc-b4a1-2728239cd955","Type":"ContainerDied","Data":"0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.010668 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dbd8d6afa76a111a0dfd35a22b36daaaec4f47229e798a3bf3c7490fd7bc92c" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.010706 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b847-account-create-kqzp9" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.013493 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"20f98654045b89872f5df2f364f5b15e9060829a694cc0678d30a79c4ecdb272"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.013523 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"01d6163416959123eea9546db5e997dc58a1c8fb48cc8df296356f31a71cb2f2"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.019373 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lrtc8" event={"ID":"97317c8e-3668-45a4-84d5-a91b4a586e81","Type":"ContainerDied","Data":"6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc"} Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.019415 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ab2606dda187efa04ef8562dea8c9425783873c60236799c6bf4adce5af6bfc" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.019483 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lrtc8" Nov 25 14:46:50 crc kubenswrapper[4879]: I1125 14:46:50.028156 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-72m5s" podStartSLOduration=7.499683966 podStartE2EDuration="14.02813811s" podCreationTimestamp="2025-11-25 14:46:36 +0000 UTC" firstStartedPulling="2025-11-25 14:46:42.771049364 +0000 UTC m=+1294.374462435" lastFinishedPulling="2025-11-25 14:46:49.299503508 +0000 UTC m=+1300.902916579" observedRunningTime="2025-11-25 14:46:50.019355039 +0000 UTC m=+1301.622768110" watchObservedRunningTime="2025-11-25 14:46:50.02813811 +0000 UTC m=+1301.631551181" Nov 25 14:46:51 crc kubenswrapper[4879]: I1125 14:46:51.032934 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"0f3579270e9a0136c7f68a5c3e04c11ba1d26b44c0c1ceb3b31b4cbca3cf4ba7"} Nov 25 14:46:51 crc kubenswrapper[4879]: I1125 14:46:51.033401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"eef1d095ea350449cd4e4c13b9b72afe1590ad3b549d49103ddfa4d450adeab0"} Nov 25 14:46:53 crc kubenswrapper[4879]: I1125 14:46:53.111255 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"0e0569b112382ff911d6d5ffb10cba08cccdc02f2dc893c1ebc01b9c2863ce6b"} Nov 25 14:46:53 crc kubenswrapper[4879]: I1125 14:46:53.111557 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"f2156b4d210c38de5222f44394d8ff73450ee1af32aec0303948ee68935f943c"} Nov 25 14:46:54 crc kubenswrapper[4879]: I1125 14:46:54.128283 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"78e26b5f462da8135c185dc405d7bb3e40a86f3a0a756b228397651aec46fca7"} Nov 25 14:46:54 crc 
kubenswrapper[4879]: I1125 14:46:54.129783 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"78e8c927e3ebbf38f18921232f416853c52caaa147f9bd6e0b42cb5c79ac392f"} Nov 25 14:47:00 crc kubenswrapper[4879]: I1125 14:47:00.213819 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"44f30f9bf7a2177883d8a6f1b7f870687899a5c37317a986aa4bf85dbf743403"} Nov 25 14:47:00 crc kubenswrapper[4879]: I1125 14:47:00.214274 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"49bdf74c01b8d49e4758eb2d1f183fd8edba4323eaf30b7d8764ca72f601ca8e"} Nov 25 14:47:00 crc kubenswrapper[4879]: I1125 14:47:00.214287 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"5c9cb1ae5da818f9561819c0427fef18049f551233eaa6475166bbfe4e96a29e"} Nov 25 14:47:00 crc kubenswrapper[4879]: I1125 14:47:00.216520 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7s5gb" event={"ID":"b010f021-2ab3-424f-910a-68f969e93561","Type":"ContainerStarted","Data":"c5a3d32bd6664e58ca040b6cfc96b13c945dc2b2fc8f9feefcfd2798ec76fabb"} Nov 25 14:47:00 crc kubenswrapper[4879]: I1125 14:47:00.236956 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-7s5gb" podStartSLOduration=2.191559328 podStartE2EDuration="36.236934369s" podCreationTimestamp="2025-11-25 14:46:24 +0000 UTC" firstStartedPulling="2025-11-25 14:46:25.423720598 +0000 UTC m=+1277.027133669" lastFinishedPulling="2025-11-25 14:46:59.469095639 +0000 UTC m=+1311.072508710" observedRunningTime="2025-11-25 14:47:00.234403329 +0000 UTC m=+1311.837816400" watchObservedRunningTime="2025-11-25 14:47:00.236934369 +0000 UTC m=+1311.840347440" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.253245 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"1a3386a1d224fe22edaf0215700f7f28f92829314b7558a91a246f504e5ef884"} Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.253621 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"cd5d7ea0e9867e8b0fe2167eed20836d54166e4956798392a0fa624050ba2841"} Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.253633 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"d24b61816a0df221f5cca68298192d65a0db4ffd65d2f4a3f373892fc2581637"} Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.253642 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerStarted","Data":"9aa6cecc8f842351e35fda9d74c697191565ff3b682681037963b8761a8ddb66"} Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.256268 4879 generic.go:334] "Generic (PLEG): container finished" podID="028ab6a4-5cba-48ec-af0f-6cc019d46e60" 
containerID="16a1925a0bab70bc1be829bb908cb1c888c4edac0592caffaf1453dc85a92d61" exitCode=0 Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.256328 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-72m5s" event={"ID":"028ab6a4-5cba-48ec-af0f-6cc019d46e60","Type":"ContainerDied","Data":"16a1925a0bab70bc1be829bb908cb1c888c4edac0592caffaf1453dc85a92d61"} Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.308759 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=43.352924421 podStartE2EDuration="1m0.308725072s" podCreationTimestamp="2025-11-25 14:46:01 +0000 UTC" firstStartedPulling="2025-11-25 14:46:42.562040197 +0000 UTC m=+1294.165453268" lastFinishedPulling="2025-11-25 14:46:59.517840848 +0000 UTC m=+1311.121253919" observedRunningTime="2025-11-25 14:47:01.283249023 +0000 UTC m=+1312.886662104" watchObservedRunningTime="2025-11-25 14:47:01.308725072 +0000 UTC m=+1312.912138143" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616020 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616469 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45f445eb-09f2-4dcc-b4a1-2728239cd955" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616490 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="45f445eb-09f2-4dcc-b4a1-2728239cd955" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616508 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3812c9b-a0f4-4c22-a569-f99b2e1ab610" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616517 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3812c9b-a0f4-4c22-a569-f99b2e1ab610" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616531 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40700243-955b-472a-8d05-4a3284152528" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616539 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="40700243-955b-472a-8d05-4a3284152528" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616566 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="032be742-1ad2-493b-aeda-974292114a3c" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616573 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="032be742-1ad2-493b-aeda-974292114a3c" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616594 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97317c8e-3668-45a4-84d5-a91b4a586e81" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616601 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97317c8e-3668-45a4-84d5-a91b4a586e81" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: E1125 14:47:01.616622 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7827b848-c65a-4cab-8768-a435040aed03" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616634 4879 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="7827b848-c65a-4cab-8768-a435040aed03" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616832 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="97317c8e-3668-45a4-84d5-a91b4a586e81" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616844 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3812c9b-a0f4-4c22-a569-f99b2e1ab610" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616862 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="40700243-955b-472a-8d05-4a3284152528" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616876 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="45f445eb-09f2-4dcc-b4a1-2728239cd955" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616887 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7827b848-c65a-4cab-8768-a435040aed03" containerName="mariadb-database-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.616900 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="032be742-1ad2-493b-aeda-974292114a3c" containerName="mariadb-account-create" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.618060 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.620215 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.624619 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764021 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6z6g\" (UniqueName: \"kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764244 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764347 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764371 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764465 
4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.764501 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.865805 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6z6g\" (UniqueName: \"kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.865904 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.865964 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.865988 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.866032 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.866048 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.866989 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.866989 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.867584 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.867719 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.868390 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.886018 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6z6g\" (UniqueName: \"kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g\") pod \"dnsmasq-dns-5c79d794d7-zz5dk\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:01 crc kubenswrapper[4879]: I1125 14:47:01.940263 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.392258 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.589624 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-72m5s" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.678795 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data\") pod \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.678957 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txmht\" (UniqueName: \"kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht\") pod \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.679005 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle\") pod \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\" (UID: \"028ab6a4-5cba-48ec-af0f-6cc019d46e60\") " Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.683503 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht" (OuterVolumeSpecName: "kube-api-access-txmht") pod "028ab6a4-5cba-48ec-af0f-6cc019d46e60" (UID: "028ab6a4-5cba-48ec-af0f-6cc019d46e60"). InnerVolumeSpecName "kube-api-access-txmht". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.708815 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "028ab6a4-5cba-48ec-af0f-6cc019d46e60" (UID: "028ab6a4-5cba-48ec-af0f-6cc019d46e60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.726436 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data" (OuterVolumeSpecName: "config-data") pod "028ab6a4-5cba-48ec-af0f-6cc019d46e60" (UID: "028ab6a4-5cba-48ec-af0f-6cc019d46e60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.780888 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.780922 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txmht\" (UniqueName: \"kubernetes.io/projected/028ab6a4-5cba-48ec-af0f-6cc019d46e60-kube-api-access-txmht\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:02 crc kubenswrapper[4879]: I1125 14:47:02.780932 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/028ab6a4-5cba-48ec-af0f-6cc019d46e60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.273461 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerID="6696ca0e451df184b4e08d9b8df9543811f46745aee22e64cfcc0713570ca32e" exitCode=0 Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.273540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" event={"ID":"6d080cad-8176-4bf2-811b-362e9f6fd534","Type":"ContainerDied","Data":"6696ca0e451df184b4e08d9b8df9543811f46745aee22e64cfcc0713570ca32e"} Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.273569 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" event={"ID":"6d080cad-8176-4bf2-811b-362e9f6fd534","Type":"ContainerStarted","Data":"45b538ef97c1145a784b0d2749975b010ff8ed7177d8c3b10b510b0f0c66b1d1"} Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.274904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-72m5s" event={"ID":"028ab6a4-5cba-48ec-af0f-6cc019d46e60","Type":"ContainerDied","Data":"e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43"} Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.274933 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3fb43aae43c1bdc945e538cb8c09e1a65876e9a89a4dcf21ebf9d3c9f1d7f43" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.274990 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-72m5s" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.538006 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7q9dx"] Nov 25 14:47:03 crc kubenswrapper[4879]: E1125 14:47:03.538459 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="028ab6a4-5cba-48ec-af0f-6cc019d46e60" containerName="keystone-db-sync" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.538477 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="028ab6a4-5cba-48ec-af0f-6cc019d46e60" containerName="keystone-db-sync" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.538753 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="028ab6a4-5cba-48ec-af0f-6cc019d46e60" containerName="keystone-db-sync" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.539564 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.546371 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.546658 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.547291 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.550452 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mzjv2" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.560560 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7q9dx"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.576409 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.599009 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.640473 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.650248 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.674678 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.701482 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.701838 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9zs9\" (UniqueName: \"kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.701910 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.701944 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.701975 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.702057 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.751288 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.753277 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.761470 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.761691 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.771609 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.779611 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-78w72"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.782590 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.785808 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.789956 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-bstpg"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.790889 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-9fqwh" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.791015 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.796180 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.801227 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.801430 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803707 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803770 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803808 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khtp4\" (UniqueName: \"kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803850 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803912 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803934 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9zs9\" (UniqueName: \"kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.803991 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " 
pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.804066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.804098 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.804134 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.804159 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.819025 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-nxcpq" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.819481 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-78w72"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.849170 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9zs9\" (UniqueName: \"kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.851540 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.860543 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.860870 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.861190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.871307 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle\") pod \"keystone-bootstrap-7q9dx\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.872113 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bstpg"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.898116 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khtp4\" (UniqueName: \"kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913725 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913748 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913835 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46w7z\" (UniqueName: \"kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913874 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913891 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " 
pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913906 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913926 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwvs7\" (UniqueName: \"kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.913992 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914016 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvzls\" (UniqueName: \"kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914034 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914069 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914154 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914179 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc 
kubenswrapper[4879]: I1125 14:47:03.914223 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914257 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914278 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914317 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.914356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.915385 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.915880 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.916863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.917392 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.917658 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.954208 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-b29mr"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.955351 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khtp4\" (UniqueName: \"kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4\") pod \"dnsmasq-dns-5b868669f-n7wjr\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.955459 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.963099 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.963452 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-427bs" Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.966005 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-b29mr"] Nov 25 14:47:03 crc kubenswrapper[4879]: I1125 14:47:03.991020 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.009108 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016526 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016572 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwvs7\" (UniqueName: \"kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016607 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016626 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvzls\" (UniqueName: \"kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016640 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016663 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016690 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016721 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016742 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 
14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016763 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016785 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016811 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016847 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016869 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016891 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46w7z\" (UniqueName: \"kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.016913 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.017955 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.022861 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.024421 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " 
pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.030163 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zlv8f"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.032160 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.039369 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.039560 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.039734 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vjd28" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.050890 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.057577 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.058499 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.059036 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.059640 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.059699 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.060513 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.061721 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.062529 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.062562 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zlv8f"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.063438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvzls\" (UniqueName: \"kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls\") pod \"ceilometer-0\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.063589 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.064471 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46w7z\" (UniqueName: \"kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z\") pod \"neutron-db-sync-78w72\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.069661 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwvs7\" (UniqueName: \"kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7\") pod \"cinder-db-sync-bstpg\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.089209 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.097341 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.109085 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-78w72" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.109968 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.119021 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.119151 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.119618 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.120035 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.120200 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lgxv\" (UniqueName: \"kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.120300 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.120336 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5h62\" (UniqueName: \"kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.120381 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.124077 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.153705 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bstpg" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224376 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5h62\" (UniqueName: \"kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224624 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224680 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224725 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224776 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224808 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224842 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224863 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224899 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq6lf\" (UniqueName: \"kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " 
pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224960 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.224999 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.225059 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.225098 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lgxv\" (UniqueName: \"kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.225263 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.228480 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.231622 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.231911 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.232621 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.233810 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.242857 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.243935 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5h62\" (UniqueName: \"kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62\") pod \"placement-db-sync-zlv8f\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.247588 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lgxv\" (UniqueName: \"kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv\") pod \"barbican-db-sync-b29mr\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.317095 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" event={"ID":"6d080cad-8176-4bf2-811b-362e9f6fd534","Type":"ContainerStarted","Data":"6d2750a928af055f4019a2085bc8f185c5c2ff4fb81962157885f18e26a39983"} Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.317403 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="dnsmasq-dns" containerID="cri-o://6d2750a928af055f4019a2085bc8f185c5c2ff4fb81962157885f18e26a39983" gracePeriod=10 Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.317940 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326356 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326468 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326492 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326534 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326551 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.326590 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq6lf\" (UniqueName: \"kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.327870 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.327886 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.328764 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.329044 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.329465 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.347799 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" podStartSLOduration=3.347782043 podStartE2EDuration="3.347782043s" podCreationTimestamp="2025-11-25 14:47:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:47:04.346611841 +0000 UTC m=+1315.950024912" watchObservedRunningTime="2025-11-25 14:47:04.347782043 +0000 UTC m=+1315.951195114" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.368036 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wq6lf\" (UniqueName: \"kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf\") pod \"dnsmasq-dns-cf78879c9-fn6n8\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.471875 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-b29mr" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.494825 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zlv8f" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.515273 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.627652 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7q9dx"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.673634 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:04 crc kubenswrapper[4879]: W1125 14:47:04.673630 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddaea822a_3378_47e5_8402_f7e781f369bb.slice/crio-2fa202f34efc4016f0286b48821b0efc9e63ab0ba8d22ff32b81e98a5f1735e9 WatchSource:0}: Error finding container 2fa202f34efc4016f0286b48821b0efc9e63ab0ba8d22ff32b81e98a5f1735e9: Status 404 returned error can't find the container with id 2fa202f34efc4016f0286b48821b0efc9e63ab0ba8d22ff32b81e98a5f1735e9 Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.720289 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-78w72"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.793946 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:47:04 crc kubenswrapper[4879]: I1125 14:47:04.821754 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bstpg"] Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.028938 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zlv8f"] Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.050206 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-b29mr"] Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.169934 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.346660 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerID="6d2750a928af055f4019a2085bc8f185c5c2ff4fb81962157885f18e26a39983" exitCode=0 Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.347075 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" event={"ID":"6d080cad-8176-4bf2-811b-362e9f6fd534","Type":"ContainerDied","Data":"6d2750a928af055f4019a2085bc8f185c5c2ff4fb81962157885f18e26a39983"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.364960 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" event={"ID":"676c4fde-fa81-48be-82cf-c8aa4baf4d71","Type":"ContainerStarted","Data":"1bd6a9c3ef9188df32097726d505835403b28807ebedcf4950372597ccc0432d"} Nov 25 14:47:05 crc 
kubenswrapper[4879]: I1125 14:47:05.367641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" event={"ID":"daea822a-3378-47e5-8402-f7e781f369bb","Type":"ContainerStarted","Data":"ee38d349a2e9270dd15e541f677a17e72824388fdf79e0745bf6a3ee219ee04c"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.367702 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" event={"ID":"daea822a-3378-47e5-8402-f7e781f369bb","Type":"ContainerStarted","Data":"2fa202f34efc4016f0286b48821b0efc9e63ab0ba8d22ff32b81e98a5f1735e9"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.370999 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-78w72" event={"ID":"284aa011-0c93-49d5-a07e-4580b44f1cdc","Type":"ContainerStarted","Data":"a7831c10e52dea800b5ac0b645f0ad3ee3a1384503fb815d37c65ab355043571"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.371062 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-78w72" event={"ID":"284aa011-0c93-49d5-a07e-4580b44f1cdc","Type":"ContainerStarted","Data":"2b49eece8a74ec12224e338e5f6f3ff8dd7f5e9d70ac66017c1b002268a7412b"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.374068 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bstpg" event={"ID":"ed169ce2-81b3-4579-8f37-f45052a7b15d","Type":"ContainerStarted","Data":"6ee5ed94ee0f6a2595cca6f49f4272207d1fd99affcbfc8ea4a9b67f5a9718b8"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.376313 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerStarted","Data":"24118a9c853e552fb4d66127a1a8e54dfd2d3c8841811158283086bfc836358f"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.377731 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zlv8f" event={"ID":"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e","Type":"ContainerStarted","Data":"72bd3c883061d48e5af00eb66a5bd1ac4d57dbd3d7312a7d0dcd93e1bdce046a"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.379153 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-b29mr" event={"ID":"41a3bd8d-a2e0-401e-b2f6-10f076e3710e","Type":"ContainerStarted","Data":"6cb96ef7b4fd0a6caa5960b7389eb2eec5a09630c40d3dabded933bb4b914fbd"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.381008 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7q9dx" event={"ID":"fc93ab85-eac9-43b8-903d-4cc6696c9e7a","Type":"ContainerStarted","Data":"c9ce651ae7bd83610ac3da78634fbc805214ed291c9f3883f73abaec3e5353d9"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.381042 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7q9dx" event={"ID":"fc93ab85-eac9-43b8-903d-4cc6696c9e7a","Type":"ContainerStarted","Data":"3abac021aca0c46096eb5edfe08f43b501734fa43f0105a783a0c43c70031f42"} Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.389308 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-78w72" podStartSLOduration=2.3892920650000002 podStartE2EDuration="2.389292065s" podCreationTimestamp="2025-11-25 14:47:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:47:05.388594436 +0000 UTC 
m=+1316.992007517" watchObservedRunningTime="2025-11-25 14:47:05.389292065 +0000 UTC m=+1316.992705136" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.411727 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7q9dx" podStartSLOduration=2.41170552 podStartE2EDuration="2.41170552s" podCreationTimestamp="2025-11-25 14:47:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:47:05.405494 +0000 UTC m=+1317.008907071" watchObservedRunningTime="2025-11-25 14:47:05.41170552 +0000 UTC m=+1317.015118591" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.484082 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.652244 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.653256 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.653278 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.653343 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.653373 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.653413 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6z6g\" (UniqueName: \"kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g\") pod \"6d080cad-8176-4bf2-811b-362e9f6fd534\" (UID: \"6d080cad-8176-4bf2-811b-362e9f6fd534\") " Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.659090 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g" (OuterVolumeSpecName: "kube-api-access-b6z6g") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "kube-api-access-b6z6g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.699607 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.700202 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.703370 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config" (OuterVolumeSpecName: "config") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.704217 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.708264 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6d080cad-8176-4bf2-811b-362e9f6fd534" (UID: "6d080cad-8176-4bf2-811b-362e9f6fd534"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755384 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755428 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755453 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755464 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755476 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6z6g\" (UniqueName: \"kubernetes.io/projected/6d080cad-8176-4bf2-811b-362e9f6fd534-kube-api-access-b6z6g\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.755490 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6d080cad-8176-4bf2-811b-362e9f6fd534-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:05 crc kubenswrapper[4879]: I1125 14:47:05.945221 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.406594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" event={"ID":"6d080cad-8176-4bf2-811b-362e9f6fd534","Type":"ContainerDied","Data":"45b538ef97c1145a784b0d2749975b010ff8ed7177d8c3b10b510b0f0c66b1d1"} Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.406671 4879 scope.go:117] "RemoveContainer" containerID="6d2750a928af055f4019a2085bc8f185c5c2ff4fb81962157885f18e26a39983" Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.406879 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-zz5dk" Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.421676 4879 generic.go:334] "Generic (PLEG): container finished" podID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerID="4e39a3f09832916ea44593c7f26401450afbe98b669fa37611e1934efc9ad63b" exitCode=0 Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.421773 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" event={"ID":"676c4fde-fa81-48be-82cf-c8aa4baf4d71","Type":"ContainerDied","Data":"4e39a3f09832916ea44593c7f26401450afbe98b669fa37611e1934efc9ad63b"} Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.424696 4879 generic.go:334] "Generic (PLEG): container finished" podID="daea822a-3378-47e5-8402-f7e781f369bb" containerID="ee38d349a2e9270dd15e541f677a17e72824388fdf79e0745bf6a3ee219ee04c" exitCode=0 Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.424980 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" event={"ID":"daea822a-3378-47e5-8402-f7e781f369bb","Type":"ContainerDied","Data":"ee38d349a2e9270dd15e541f677a17e72824388fdf79e0745bf6a3ee219ee04c"} Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.603851 4879 scope.go:117] "RemoveContainer" containerID="6696ca0e451df184b4e08d9b8df9543811f46745aee22e64cfcc0713570ca32e" Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.616794 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.630509 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-zz5dk"] Nov 25 14:47:06 crc kubenswrapper[4879]: I1125 14:47:06.841684 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005110 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005299 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005359 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005479 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.005505 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khtp4\" (UniqueName: \"kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4\") pod \"daea822a-3378-47e5-8402-f7e781f369bb\" (UID: \"daea822a-3378-47e5-8402-f7e781f369bb\") " Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.026210 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4" (OuterVolumeSpecName: "kube-api-access-khtp4") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "kube-api-access-khtp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.029802 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.033930 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.039601 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.054512 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.064661 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config" (OuterVolumeSpecName: "config") pod "daea822a-3378-47e5-8402-f7e781f369bb" (UID: "daea822a-3378-47e5-8402-f7e781f369bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110207 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110249 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110264 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110275 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khtp4\" (UniqueName: \"kubernetes.io/projected/daea822a-3378-47e5-8402-f7e781f369bb-kube-api-access-khtp4\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110286 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.110297 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daea822a-3378-47e5-8402-f7e781f369bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.444041 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" event={"ID":"daea822a-3378-47e5-8402-f7e781f369bb","Type":"ContainerDied","Data":"2fa202f34efc4016f0286b48821b0efc9e63ab0ba8d22ff32b81e98a5f1735e9"} Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.444092 4879 scope.go:117] "RemoveContainer" containerID="ee38d349a2e9270dd15e541f677a17e72824388fdf79e0745bf6a3ee219ee04c" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.444237 4879 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b868669f-n7wjr" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.503096 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.513153 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b868669f-n7wjr"] Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.657718 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" path="/var/lib/kubelet/pods/6d080cad-8176-4bf2-811b-362e9f6fd534/volumes" Nov 25 14:47:07 crc kubenswrapper[4879]: I1125 14:47:07.658649 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daea822a-3378-47e5-8402-f7e781f369bb" path="/var/lib/kubelet/pods/daea822a-3378-47e5-8402-f7e781f369bb/volumes" Nov 25 14:47:08 crc kubenswrapper[4879]: I1125 14:47:08.458160 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" event={"ID":"676c4fde-fa81-48be-82cf-c8aa4baf4d71","Type":"ContainerStarted","Data":"1feca86685f4db2898b53b2d74a33b811cc2699d97b2d5d9c9274b9e6bd05285"} Nov 25 14:47:09 crc kubenswrapper[4879]: I1125 14:47:09.468309 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:09 crc kubenswrapper[4879]: I1125 14:47:09.493562 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" podStartSLOduration=5.493539858 podStartE2EDuration="5.493539858s" podCreationTimestamp="2025-11-25 14:47:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:47:09.485351523 +0000 UTC m=+1321.088764604" watchObservedRunningTime="2025-11-25 14:47:09.493539858 +0000 UTC m=+1321.096952949" Nov 25 14:47:14 crc kubenswrapper[4879]: I1125 14:47:14.517882 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:47:14 crc kubenswrapper[4879]: I1125 14:47:14.583655 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:47:14 crc kubenswrapper[4879]: I1125 14:47:14.583915 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" containerID="cri-o://41c2542e53d215903d6489c638cfa4d00898988a7f57f64c5234d8ed55e5482a" gracePeriod=10 Nov 25 14:47:16 crc kubenswrapper[4879]: I1125 14:47:16.269938 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Nov 25 14:47:21 crc kubenswrapper[4879]: I1125 14:47:21.269955 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Nov 25 14:47:23 crc kubenswrapper[4879]: I1125 14:47:23.990675 4879 generic.go:334] "Generic (PLEG): container finished" podID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" 
containerID="41c2542e53d215903d6489c638cfa4d00898988a7f57f64c5234d8ed55e5482a" exitCode=0 Nov 25 14:47:23 crc kubenswrapper[4879]: I1125 14:47:23.990841 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" event={"ID":"89f9ccb7-32af-4de5-b9a7-8771989d8a1b","Type":"ContainerDied","Data":"41c2542e53d215903d6489c638cfa4d00898988a7f57f64c5234d8ed55e5482a"} Nov 25 14:47:26 crc kubenswrapper[4879]: I1125 14:47:26.270155 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: connect: connection refused" Nov 25 14:47:26 crc kubenswrapper[4879]: I1125 14:47:26.270285 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:47:30 crc kubenswrapper[4879]: I1125 14:47:30.041404 4879 generic.go:334] "Generic (PLEG): container finished" podID="fc93ab85-eac9-43b8-903d-4cc6696c9e7a" containerID="c9ce651ae7bd83610ac3da78634fbc805214ed291c9f3883f73abaec3e5353d9" exitCode=0 Nov 25 14:47:30 crc kubenswrapper[4879]: I1125 14:47:30.041593 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7q9dx" event={"ID":"fc93ab85-eac9-43b8-903d-4cc6696c9e7a","Type":"ContainerDied","Data":"c9ce651ae7bd83610ac3da78634fbc805214ed291c9f3883f73abaec3e5353d9"} Nov 25 14:47:31 crc kubenswrapper[4879]: E1125 14:47:31.156847 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Nov 25 14:47:31 crc kubenswrapper[4879]: E1125 14:47:31.156969 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6lgxv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
barbican-db-sync-b29mr_openstack(41a3bd8d-a2e0-401e-b2f6-10f076e3710e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:47:31 crc kubenswrapper[4879]: E1125 14:47:31.159036 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-b29mr" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" Nov 25 14:47:32 crc kubenswrapper[4879]: E1125 14:47:32.057940 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-b29mr" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" Nov 25 14:47:32 crc kubenswrapper[4879]: E1125 14:47:32.582532 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Nov 25 14:47:32 crc kubenswrapper[4879]: E1125 14:47:32.583136 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jwvs7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSou
rce{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-bstpg_openstack(ed169ce2-81b3-4579-8f37-f45052a7b15d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:47:32 crc kubenswrapper[4879]: E1125 14:47:32.584657 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-bstpg" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" Nov 25 14:47:33 crc kubenswrapper[4879]: E1125 14:47:33.071642 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-bstpg" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" Nov 25 14:47:34 crc kubenswrapper[4879]: E1125 14:47:34.009083 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Nov 25 14:47:34 crc kubenswrapper[4879]: E1125 14:47:34.009343 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w5h62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},Resta
rtPolicy:nil,} start failed in pod placement-db-sync-zlv8f_openstack(b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:47:34 crc kubenswrapper[4879]: E1125 14:47:34.010526 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-zlv8f" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.080910 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7q9dx" event={"ID":"fc93ab85-eac9-43b8-903d-4cc6696c9e7a","Type":"ContainerDied","Data":"3abac021aca0c46096eb5edfe08f43b501734fa43f0105a783a0c43c70031f42"} Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.081435 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3abac021aca0c46096eb5edfe08f43b501734fa43f0105a783a0c43c70031f42" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.087696 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" event={"ID":"89f9ccb7-32af-4de5-b9a7-8771989d8a1b","Type":"ContainerDied","Data":"ade4ad8f4e3bf9fbbea988b9fd44da7b05ec6f3bc912a00b0e57e7572b02a11c"} Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.087739 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ade4ad8f4e3bf9fbbea988b9fd44da7b05ec6f3bc912a00b0e57e7572b02a11c" Nov 25 14:47:34 crc kubenswrapper[4879]: E1125 14:47:34.088792 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-zlv8f" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.175019 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.186306 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328169 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328246 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328270 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328317 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9zs9\" (UniqueName: \"kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328404 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgv77\" (UniqueName: \"kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77\") pod \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328445 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb\") pod \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config\") pod \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328502 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc\") pod \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\" (UID: \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328549 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328662 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb\") pod \"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\" (UID: 
\"89f9ccb7-32af-4de5-b9a7-8771989d8a1b\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.328688 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts\") pod \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\" (UID: \"fc93ab85-eac9-43b8-903d-4cc6696c9e7a\") " Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.336493 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.338558 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts" (OuterVolumeSpecName: "scripts") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.361532 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.376384 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9" (OuterVolumeSpecName: "kube-api-access-w9zs9") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "kube-api-access-w9zs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.389328 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77" (OuterVolumeSpecName: "kube-api-access-hgv77") pod "89f9ccb7-32af-4de5-b9a7-8771989d8a1b" (UID: "89f9ccb7-32af-4de5-b9a7-8771989d8a1b"). InnerVolumeSpecName "kube-api-access-hgv77". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.430873 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.430912 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.430936 4879 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.430949 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9zs9\" (UniqueName: \"kubernetes.io/projected/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-kube-api-access-w9zs9\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.430976 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgv77\" (UniqueName: \"kubernetes.io/projected/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-kube-api-access-hgv77\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.444597 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data" (OuterVolumeSpecName: "config-data") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.489295 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc93ab85-eac9-43b8-903d-4cc6696c9e7a" (UID: "fc93ab85-eac9-43b8-903d-4cc6696c9e7a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.499541 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "89f9ccb7-32af-4de5-b9a7-8771989d8a1b" (UID: "89f9ccb7-32af-4de5-b9a7-8771989d8a1b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.500710 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config" (OuterVolumeSpecName: "config") pod "89f9ccb7-32af-4de5-b9a7-8771989d8a1b" (UID: "89f9ccb7-32af-4de5-b9a7-8771989d8a1b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.510585 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "89f9ccb7-32af-4de5-b9a7-8771989d8a1b" (UID: "89f9ccb7-32af-4de5-b9a7-8771989d8a1b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.512412 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "89f9ccb7-32af-4de5-b9a7-8771989d8a1b" (UID: "89f9ccb7-32af-4de5-b9a7-8771989d8a1b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.532921 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.532958 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.532971 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.532981 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.532992 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89f9ccb7-32af-4de5-b9a7-8771989d8a1b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:34 crc kubenswrapper[4879]: I1125 14:47:34.533002 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc93ab85-eac9-43b8-903d-4cc6696c9e7a-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.096491 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerStarted","Data":"8eafea892e011e808ad324d6a47609d2b95f1e38a80b0035a3b8b7bd85590210"} Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.096517 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.096514 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7q9dx" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.152625 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.163382 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-xb84j"] Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.328324 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7q9dx"] Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.352335 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7q9dx"] Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.384977 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vtvqt"] Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388366 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388390 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388407 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388412 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388422 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388430 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388442 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388449 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388469 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daea822a-3378-47e5-8402-f7e781f369bb" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388476 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="daea822a-3378-47e5-8402-f7e781f369bb" containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: E1125 14:47:35.388488 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc93ab85-eac9-43b8-903d-4cc6696c9e7a" containerName="keystone-bootstrap" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388495 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc93ab85-eac9-43b8-903d-4cc6696c9e7a" containerName="keystone-bootstrap" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388680 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388696 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="daea822a-3378-47e5-8402-f7e781f369bb" 
containerName="init" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388714 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc93ab85-eac9-43b8-903d-4cc6696c9e7a" containerName="keystone-bootstrap" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.388731 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d080cad-8176-4bf2-811b-362e9f6fd534" containerName="dnsmasq-dns" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.389337 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.392691 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.392937 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.393102 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.393371 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.393521 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mzjv2" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.405542 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vtvqt"] Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550149 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550261 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550283 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550318 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd89l\" (UniqueName: \"kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " 
pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.550394 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.651814 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.651886 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.651988 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.652014 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.652065 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.652115 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd89l\" (UniqueName: \"kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.656716 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.657103 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" path="/var/lib/kubelet/pods/89f9ccb7-32af-4de5-b9a7-8771989d8a1b/volumes" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.657425 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys\") 
pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.657823 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.658541 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc93ab85-eac9-43b8-903d-4cc6696c9e7a" path="/var/lib/kubelet/pods/fc93ab85-eac9-43b8-903d-4cc6696c9e7a/volumes" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.661786 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.662483 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.669698 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd89l\" (UniqueName: \"kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l\") pod \"keystone-bootstrap-vtvqt\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:35 crc kubenswrapper[4879]: I1125 14:47:35.707014 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:47:36 crc kubenswrapper[4879]: I1125 14:47:36.271163 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-xb84j" podUID="89f9ccb7-32af-4de5-b9a7-8771989d8a1b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.118:5353: i/o timeout" Nov 25 14:47:36 crc kubenswrapper[4879]: I1125 14:47:36.537234 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vtvqt"] Nov 25 14:47:36 crc kubenswrapper[4879]: W1125 14:47:36.542893 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0bbad7b_911f_46df_a620_15fd94df5326.slice/crio-a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a WatchSource:0}: Error finding container a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a: Status 404 returned error can't find the container with id a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a Nov 25 14:47:37 crc kubenswrapper[4879]: I1125 14:47:37.114270 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerStarted","Data":"1ecb9b9e91afd6806729ff513a9a98d92cb8f9cc36ee0a4615fe29f265e21ee3"} Nov 25 14:47:37 crc kubenswrapper[4879]: I1125 14:47:37.115767 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtvqt" event={"ID":"e0bbad7b-911f-46df-a620-15fd94df5326","Type":"ContainerStarted","Data":"356c8b0d5d0a619eca770586e3ffc2155074fbe2adbe3a513de619e7a0a60203"} Nov 25 14:47:37 crc kubenswrapper[4879]: I1125 14:47:37.115786 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtvqt" event={"ID":"e0bbad7b-911f-46df-a620-15fd94df5326","Type":"ContainerStarted","Data":"a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a"} Nov 25 14:47:37 crc kubenswrapper[4879]: I1125 14:47:37.134381 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vtvqt" podStartSLOduration=2.13436095 podStartE2EDuration="2.13436095s" podCreationTimestamp="2025-11-25 14:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:47:37.129479077 +0000 UTC m=+1348.732892148" watchObservedRunningTime="2025-11-25 14:47:37.13436095 +0000 UTC m=+1348.737774021" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.097750 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.100004 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.111968 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.267084 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.267233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqxhx\" (UniqueName: \"kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.267297 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.368970 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqxhx\" (UniqueName: \"kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.369025 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.369178 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.369736 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.369760 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.390015 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fqxhx\" (UniqueName: \"kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx\") pod \"redhat-operators-k98hv\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:47:47 crc kubenswrapper[4879]: I1125 14:47:47.432094 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.581411 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.583252 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.585453 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.585720 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.594903 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.697555 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.697895 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.800036 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.800141 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.800154 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.820483 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:15 crc kubenswrapper[4879]: I1125 14:48:15.903359 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:17 crc kubenswrapper[4879]: I1125 14:48:17.408701 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:48:17 crc kubenswrapper[4879]: I1125 14:48:17.409070 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.180658 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.181921 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.192955 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.300788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.300833 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.300909 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.402236 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.402291 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " 
pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.402338 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.402377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.402504 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.419219 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access\") pod \"installer-9-crc\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:21 crc kubenswrapper[4879]: I1125 14:48:21.504680 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.100217 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.100865 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w5h62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-zlv8f_openstack(b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.102404 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-zlv8f" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.121577 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.121763 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jwvs7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-bstpg_openstack(ed169ce2-81b3-4579-8f37-f45052a7b15d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.122312 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.122495 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6lgxv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-b29mr_openstack(41a3bd8d-a2e0-401e-b2f6-10f076e3710e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.123328 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-bstpg" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.124438 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-b29mr" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.239245 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/sg-core:latest" Nov 25 14:48:25 crc kubenswrapper[4879]: E1125 14:48:25.239685 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rvzls,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d66a89b6-8fa7-4eb7-8464-41783ec1a26c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.241883 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.517089 4879 generic.go:334] "Generic (PLEG): container finished" podID="e0bbad7b-911f-46df-a620-15fd94df5326" containerID="356c8b0d5d0a619eca770586e3ffc2155074fbe2adbe3a513de619e7a0a60203" exitCode=0 Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.517144 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtvqt" event={"ID":"e0bbad7b-911f-46df-a620-15fd94df5326","Type":"ContainerDied","Data":"356c8b0d5d0a619eca770586e3ffc2155074fbe2adbe3a513de619e7a0a60203"} Nov 25 14:48:25 crc kubenswrapper[4879]: W1125 14:48:25.615618 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod211ea349_e87d_41a4_aa0e_e07b5cd25946.slice/crio-c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4 WatchSource:0}: Error finding container c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4: Status 404 returned error can't find the container with id c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4 Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.615751 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.627304 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 14:48:25 crc kubenswrapper[4879]: I1125 14:48:25.637176 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.527526 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"211ea349-e87d-41a4-aa0e-e07b5cd25946","Type":"ContainerStarted","Data":"1109de26864c0e4e71e7a66146bc7c09399b59c794ad54e48ec6374d8352ffc9"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.527876 4879 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"211ea349-e87d-41a4-aa0e-e07b5cd25946","Type":"ContainerStarted","Data":"c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.529485 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"eaa5173f-153f-4183-8022-8b6e2270368f","Type":"ContainerStarted","Data":"a234195f6ab6d65cc767d6675f6013d91ae7dcf0a5531f737a0b89bb98f46550"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.529619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"eaa5173f-153f-4183-8022-8b6e2270368f","Type":"ContainerStarted","Data":"03db7f54c1dbe4f2218e2b85d8410e986b5035d996438b6886061d2a51c19729"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.531294 4879 generic.go:334] "Generic (PLEG): container finished" podID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerID="646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab" exitCode=0 Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.531993 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerDied","Data":"646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.532032 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerStarted","Data":"7284b76067e2439f2f70ab6699767830574b116183107d462009bc55697002f0"} Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.916204 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996298 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996368 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996409 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996467 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd89l\" (UniqueName: \"kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996571 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:26 crc kubenswrapper[4879]: I1125 14:48:26.996664 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys\") pod \"e0bbad7b-911f-46df-a620-15fd94df5326\" (UID: \"e0bbad7b-911f-46df-a620-15fd94df5326\") " Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.002924 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts" (OuterVolumeSpecName: "scripts") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.003631 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l" (OuterVolumeSpecName: "kube-api-access-rd89l") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "kube-api-access-rd89l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.013351 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.013979 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.024699 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data" (OuterVolumeSpecName: "config-data") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.026555 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0bbad7b-911f-46df-a620-15fd94df5326" (UID: "e0bbad7b-911f-46df-a620-15fd94df5326"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098808 4879 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098848 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098860 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098871 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd89l\" (UniqueName: \"kubernetes.io/projected/e0bbad7b-911f-46df-a620-15fd94df5326-kube-api-access-rd89l\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098885 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.098895 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0bbad7b-911f-46df-a620-15fd94df5326-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.564544 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtvqt" event={"ID":"e0bbad7b-911f-46df-a620-15fd94df5326","Type":"ContainerDied","Data":"a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a"} Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.564595 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a69e3884b08bd811abac4b28a5700bd262e50725c5420bc2267dea67e224ff0a" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.564569 4879 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vtvqt" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.568840 4879 generic.go:334] "Generic (PLEG): container finished" podID="eaa5173f-153f-4183-8022-8b6e2270368f" containerID="a234195f6ab6d65cc767d6675f6013d91ae7dcf0a5531f737a0b89bb98f46550" exitCode=0 Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.569181 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"eaa5173f-153f-4183-8022-8b6e2270368f","Type":"ContainerDied","Data":"a234195f6ab6d65cc767d6675f6013d91ae7dcf0a5531f737a0b89bb98f46550"} Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.595884 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.595866986 podStartE2EDuration="6.595866986s" podCreationTimestamp="2025-11-25 14:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:48:27.587601729 +0000 UTC m=+1399.191014790" watchObservedRunningTime="2025-11-25 14:48:27.595866986 +0000 UTC m=+1399.199280057" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.662924 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:48:27 crc kubenswrapper[4879]: E1125 14:48:27.663269 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0bbad7b-911f-46df-a620-15fd94df5326" containerName="keystone-bootstrap" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.663289 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0bbad7b-911f-46df-a620-15fd94df5326" containerName="keystone-bootstrap" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.663505 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0bbad7b-911f-46df-a620-15fd94df5326" containerName="keystone-bootstrap" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.664207 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.664296 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.666646 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.666848 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mzjv2" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.668255 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.668456 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.668654 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.668836 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.714961 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.715561 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.715770 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdgs9\" (UniqueName: \"kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.715930 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.716029 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.716174 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.716332 
4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.716432 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818091 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818168 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818232 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdgs9\" (UniqueName: \"kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818307 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818336 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818376 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818430 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.818458 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" 
(UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.824843 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.825327 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.826162 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.828009 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.831843 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.833817 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.835626 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.841607 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdgs9\" (UniqueName: \"kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9\") pod \"keystone-77b6b86f95-nr5cz\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:27 crc kubenswrapper[4879]: I1125 14:48:27.995041 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:28 crc kubenswrapper[4879]: I1125 14:48:28.464411 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:48:28 crc kubenswrapper[4879]: W1125 14:48:28.496636 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf7b7503_cc0d_48fe_be8a_75f2362edebf.slice/crio-811f99f745dacfd092b7a6dca49b7790a0cc6b88c2f53f79491b0f931b893e7b WatchSource:0}: Error finding container 811f99f745dacfd092b7a6dca49b7790a0cc6b88c2f53f79491b0f931b893e7b: Status 404 returned error can't find the container with id 811f99f745dacfd092b7a6dca49b7790a0cc6b88c2f53f79491b0f931b893e7b Nov 25 14:48:28 crc kubenswrapper[4879]: I1125 14:48:28.581284 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-77b6b86f95-nr5cz" event={"ID":"df7b7503-cc0d-48fe-be8a-75f2362edebf","Type":"ContainerStarted","Data":"811f99f745dacfd092b7a6dca49b7790a0cc6b88c2f53f79491b0f931b893e7b"} Nov 25 14:48:28 crc kubenswrapper[4879]: I1125 14:48:28.994062 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.056074 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir\") pod \"eaa5173f-153f-4183-8022-8b6e2270368f\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.056249 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access\") pod \"eaa5173f-153f-4183-8022-8b6e2270368f\" (UID: \"eaa5173f-153f-4183-8022-8b6e2270368f\") " Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.056281 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "eaa5173f-153f-4183-8022-8b6e2270368f" (UID: "eaa5173f-153f-4183-8022-8b6e2270368f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.056820 4879 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eaa5173f-153f-4183-8022-8b6e2270368f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.073624 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "eaa5173f-153f-4183-8022-8b6e2270368f" (UID: "eaa5173f-153f-4183-8022-8b6e2270368f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.158641 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eaa5173f-153f-4183-8022-8b6e2270368f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.595049 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"eaa5173f-153f-4183-8022-8b6e2270368f","Type":"ContainerDied","Data":"03db7f54c1dbe4f2218e2b85d8410e986b5035d996438b6886061d2a51c19729"} Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.595397 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03db7f54c1dbe4f2218e2b85d8410e986b5035d996438b6886061d2a51c19729" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.595473 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.618705 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-77b6b86f95-nr5cz" event={"ID":"df7b7503-cc0d-48fe-be8a-75f2362edebf","Type":"ContainerStarted","Data":"c9cf3523b0a1a1aafb8fd8490358b7f25d0f8537ee1e95391e205a2a8c57be8d"} Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.619988 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.656156 4879 generic.go:334] "Generic (PLEG): container finished" podID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerID="534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc" exitCode=0 Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.667782 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerDied","Data":"534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc"} Nov 25 14:48:29 crc kubenswrapper[4879]: I1125 14:48:29.705306 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-77b6b86f95-nr5cz" podStartSLOduration=2.70511591 podStartE2EDuration="2.70511591s" podCreationTimestamp="2025-11-25 14:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:48:29.658314035 +0000 UTC m=+1401.261727116" watchObservedRunningTime="2025-11-25 14:48:29.70511591 +0000 UTC m=+1401.308528991" Nov 25 14:48:35 crc kubenswrapper[4879]: E1125 14:48:35.660809 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.726723 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerStarted","Data":"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44"} Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.728737 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerStarted","Data":"43c580d45380b1fdba520fba58621ee847dad067f001497e0ac5e474479aaccc"} Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.728946 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-central-agent" containerID="cri-o://8eafea892e011e808ad324d6a47609d2b95f1e38a80b0035a3b8b7bd85590210" gracePeriod=30 Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.728994 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.728999 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="proxy-httpd" containerID="cri-o://43c580d45380b1fdba520fba58621ee847dad067f001497e0ac5e474479aaccc" gracePeriod=30 Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.729018 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-notification-agent" containerID="cri-o://1ecb9b9e91afd6806729ff513a9a98d92cb8f9cc36ee0a4615fe29f265e21ee3" gracePeriod=30 Nov 25 14:48:35 crc kubenswrapper[4879]: I1125 14:48:35.757241 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k98hv" podStartSLOduration=40.054025011 podStartE2EDuration="48.757222886s" podCreationTimestamp="2025-11-25 14:47:47 +0000 UTC" firstStartedPulling="2025-11-25 14:48:26.532974557 +0000 UTC m=+1398.136387628" lastFinishedPulling="2025-11-25 14:48:35.236172432 +0000 UTC m=+1406.839585503" observedRunningTime="2025-11-25 14:48:35.74678511 +0000 UTC m=+1407.350198181" watchObservedRunningTime="2025-11-25 14:48:35.757222886 +0000 UTC m=+1407.360635967" Nov 25 14:48:36 crc kubenswrapper[4879]: I1125 14:48:36.740719 4879 generic.go:334] "Generic (PLEG): container finished" podID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerID="43c580d45380b1fdba520fba58621ee847dad067f001497e0ac5e474479aaccc" exitCode=0 Nov 25 14:48:36 crc kubenswrapper[4879]: I1125 14:48:36.741035 4879 generic.go:334] "Generic (PLEG): container finished" podID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerID="8eafea892e011e808ad324d6a47609d2b95f1e38a80b0035a3b8b7bd85590210" exitCode=0 Nov 25 14:48:36 crc kubenswrapper[4879]: I1125 14:48:36.740782 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerDied","Data":"43c580d45380b1fdba520fba58621ee847dad067f001497e0ac5e474479aaccc"} Nov 25 14:48:36 crc kubenswrapper[4879]: I1125 14:48:36.741392 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerDied","Data":"8eafea892e011e808ad324d6a47609d2b95f1e38a80b0035a3b8b7bd85590210"} Nov 25 14:48:37 crc kubenswrapper[4879]: I1125 14:48:37.432982 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:37 crc kubenswrapper[4879]: I1125 14:48:37.433052 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:38 crc kubenswrapper[4879]: I1125 14:48:38.481747 4879 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-k98hv" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="registry-server" probeResult="failure" output=< Nov 25 14:48:38 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:48:38 crc kubenswrapper[4879]: > Nov 25 14:48:38 crc kubenswrapper[4879]: E1125 14:48:38.645892 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-b29mr" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" Nov 25 14:48:39 crc kubenswrapper[4879]: E1125 14:48:39.657926 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-bstpg" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" Nov 25 14:48:39 crc kubenswrapper[4879]: I1125 14:48:39.767143 4879 generic.go:334] "Generic (PLEG): container finished" podID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerID="1ecb9b9e91afd6806729ff513a9a98d92cb8f9cc36ee0a4615fe29f265e21ee3" exitCode=0 Nov 25 14:48:39 crc kubenswrapper[4879]: I1125 14:48:39.767168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerDied","Data":"1ecb9b9e91afd6806729ff513a9a98d92cb8f9cc36ee0a4615fe29f265e21ee3"} Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.034391 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161241 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161286 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161329 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161398 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvzls\" (UniqueName: \"kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161461 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161534 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161564 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts\") pod \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\" (UID: \"d66a89b6-8fa7-4eb7-8464-41783ec1a26c\") " Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.161958 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.162281 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.168601 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.168980 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls" (OuterVolumeSpecName: "kube-api-access-rvzls") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "kube-api-access-rvzls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.170383 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts" (OuterVolumeSpecName: "scripts") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.239117 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.262957 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.262989 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.263173 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.263189 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvzls\" (UniqueName: \"kubernetes.io/projected/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-kube-api-access-rvzls\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.263202 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.263212 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.264866 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data" (OuterVolumeSpecName: "config-data") pod "d66a89b6-8fa7-4eb7-8464-41783ec1a26c" (UID: "d66a89b6-8fa7-4eb7-8464-41783ec1a26c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.365028 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d66a89b6-8fa7-4eb7-8464-41783ec1a26c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:40 crc kubenswrapper[4879]: E1125 14:48:40.646423 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-zlv8f" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.780933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d66a89b6-8fa7-4eb7-8464-41783ec1a26c","Type":"ContainerDied","Data":"24118a9c853e552fb4d66127a1a8e54dfd2d3c8841811158283086bfc836358f"} Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.781013 4879 scope.go:117] "RemoveContainer" containerID="43c580d45380b1fdba520fba58621ee847dad067f001497e0ac5e474479aaccc" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.781024 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.802733 4879 scope.go:117] "RemoveContainer" containerID="1ecb9b9e91afd6806729ff513a9a98d92cb8f9cc36ee0a4615fe29f265e21ee3" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.832732 4879 scope.go:117] "RemoveContainer" containerID="8eafea892e011e808ad324d6a47609d2b95f1e38a80b0035a3b8b7bd85590210" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.853358 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.861513 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.878450 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:48:40 crc kubenswrapper[4879]: E1125 14:48:40.878869 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-notification-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.878890 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-notification-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: E1125 14:48:40.878922 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="proxy-httpd" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.878931 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="proxy-httpd" Nov 25 14:48:40 crc kubenswrapper[4879]: E1125 14:48:40.878944 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa5173f-153f-4183-8022-8b6e2270368f" containerName="pruner" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.878952 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="eaa5173f-153f-4183-8022-8b6e2270368f" containerName="pruner" Nov 25 14:48:40 crc kubenswrapper[4879]: E1125 14:48:40.878968 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-central-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.878976 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-central-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.879319 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-central-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.879345 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="ceilometer-notification-agent" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.879364 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" containerName="proxy-httpd" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.879381 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaa5173f-153f-4183-8022-8b6e2270368f" containerName="pruner" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.881513 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.883713 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.883905 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.896669 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.978722 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.978813 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljwhs\" (UniqueName: \"kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.978863 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.978975 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.979070 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.979271 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:40 crc kubenswrapper[4879]: I1125 14:48:40.979309 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080275 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080633 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080675 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080699 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljwhs\" (UniqueName: \"kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080727 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080763 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.080803 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.081473 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.081498 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.086164 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.091272 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.091823 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.098927 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.118338 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljwhs\" (UniqueName: \"kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs\") pod \"ceilometer-0\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") " pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.208698 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.637250 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:48:41 crc kubenswrapper[4879]: W1125 14:48:41.649804 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49f50af1_0ea3_44a7_8cd9_a22a4009f6a1.slice/crio-a462db2127462daefe35b313babef30703afdeca15eeaf43aa4343cf28b198dc WatchSource:0}: Error finding container a462db2127462daefe35b313babef30703afdeca15eeaf43aa4343cf28b198dc: Status 404 returned error can't find the container with id a462db2127462daefe35b313babef30703afdeca15eeaf43aa4343cf28b198dc Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.657535 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d66a89b6-8fa7-4eb7-8464-41783ec1a26c" path="/var/lib/kubelet/pods/d66a89b6-8fa7-4eb7-8464-41783ec1a26c/volumes" Nov 25 14:48:41 crc kubenswrapper[4879]: I1125 14:48:41.790537 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerStarted","Data":"a462db2127462daefe35b313babef30703afdeca15eeaf43aa4343cf28b198dc"} Nov 25 14:48:43 crc kubenswrapper[4879]: I1125 14:48:43.830370 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerStarted","Data":"963285dbb567e3fb50840338ce65066a0f57286a081eee1e4fde35ff6d3a7ae7"} Nov 25 14:48:44 crc kubenswrapper[4879]: I1125 14:48:44.842155 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerStarted","Data":"4b3e2f1a92c3ec318d8860400eed136cf021710c1bf69b80c09ebe60b3bd9674"} Nov 25 14:48:46 crc kubenswrapper[4879]: I1125 14:48:46.861003 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerStarted","Data":"e71cfe4c8214f91984fbd214b7b60fe3f8777249c064f99b0f5735128c0af738"} Nov 25 14:48:47 crc kubenswrapper[4879]: I1125 14:48:47.408988 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:48:47 crc kubenswrapper[4879]: I1125 14:48:47.409045 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:48:47 crc kubenswrapper[4879]: I1125 14:48:47.483592 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:47 crc kubenswrapper[4879]: I1125 14:48:47.530744 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:48 crc kubenswrapper[4879]: I1125 14:48:48.314374 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:48:48 crc kubenswrapper[4879]: I1125 14:48:48.877564 4879 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k98hv" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="registry-server" containerID="cri-o://c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44" gracePeriod=2 Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.671728 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.785077 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqxhx\" (UniqueName: \"kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx\") pod \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.785235 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities\") pod \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.785448 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content\") pod \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\" (UID: \"3d1a00f0-c95b-41f1-b520-38778d9e9fb5\") " Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.786099 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities" (OuterVolumeSpecName: "utilities") pod "3d1a00f0-c95b-41f1-b520-38778d9e9fb5" (UID: "3d1a00f0-c95b-41f1-b520-38778d9e9fb5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.786499 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.789827 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx" (OuterVolumeSpecName: "kube-api-access-fqxhx") pod "3d1a00f0-c95b-41f1-b520-38778d9e9fb5" (UID: "3d1a00f0-c95b-41f1-b520-38778d9e9fb5"). InnerVolumeSpecName "kube-api-access-fqxhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.880984 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d1a00f0-c95b-41f1-b520-38778d9e9fb5" (UID: "3d1a00f0-c95b-41f1-b520-38778d9e9fb5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.888934 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqxhx\" (UniqueName: \"kubernetes.io/projected/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-kube-api-access-fqxhx\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.889018 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d1a00f0-c95b-41f1-b520-38778d9e9fb5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.912572 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerStarted","Data":"6ccf158ca17dbc7206f394a97a3f6e39c2fdb7c9caace41a3e5e0fd60578c244"} Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.913448 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.915876 4879 generic.go:334] "Generic (PLEG): container finished" podID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerID="c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44" exitCode=0 Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.915930 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerDied","Data":"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44"} Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.915952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k98hv" event={"ID":"3d1a00f0-c95b-41f1-b520-38778d9e9fb5","Type":"ContainerDied","Data":"7284b76067e2439f2f70ab6699767830574b116183107d462009bc55697002f0"} Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.915967 4879 scope.go:117] "RemoveContainer" containerID="c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.916212 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k98hv" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.926492 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-b29mr" event={"ID":"41a3bd8d-a2e0-401e-b2f6-10f076e3710e","Type":"ContainerStarted","Data":"4e612833ae0b26c1698789035216d1a5fc8ea3d9745279c425d6aa0e79629e25"} Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.930112 4879 generic.go:334] "Generic (PLEG): container finished" podID="b010f021-2ab3-424f-910a-68f969e93561" containerID="c5a3d32bd6664e58ca040b6cfc96b13c945dc2b2fc8f9feefcfd2798ec76fabb" exitCode=0 Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.930170 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7s5gb" event={"ID":"b010f021-2ab3-424f-910a-68f969e93561","Type":"ContainerDied","Data":"c5a3d32bd6664e58ca040b6cfc96b13c945dc2b2fc8f9feefcfd2798ec76fabb"} Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.947351 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.483686225 podStartE2EDuration="12.947332925s" podCreationTimestamp="2025-11-25 14:48:40 +0000 UTC" firstStartedPulling="2025-11-25 14:48:41.651928221 +0000 UTC m=+1413.255341292" lastFinishedPulling="2025-11-25 14:48:52.115574921 +0000 UTC m=+1423.718987992" observedRunningTime="2025-11-25 14:48:52.939354895 +0000 UTC m=+1424.542767986" watchObservedRunningTime="2025-11-25 14:48:52.947332925 +0000 UTC m=+1424.550745996" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.960484 4879 scope.go:117] "RemoveContainer" containerID="534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc" Nov 25 14:48:52 crc kubenswrapper[4879]: I1125 14:48:52.997826 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-b29mr" podStartSLOduration=2.711028229 podStartE2EDuration="1m49.997803055s" podCreationTimestamp="2025-11-25 14:47:03 +0000 UTC" firstStartedPulling="2025-11-25 14:47:05.080263552 +0000 UTC m=+1316.683676623" lastFinishedPulling="2025-11-25 14:48:52.367038378 +0000 UTC m=+1423.970451449" observedRunningTime="2025-11-25 14:48:52.978475412 +0000 UTC m=+1424.581888493" watchObservedRunningTime="2025-11-25 14:48:52.997803055 +0000 UTC m=+1424.601216126" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.000978 4879 scope.go:117] "RemoveContainer" containerID="646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.005626 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.013478 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k98hv"] Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.020936 4879 scope.go:117] "RemoveContainer" containerID="c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44" Nov 25 14:48:53 crc kubenswrapper[4879]: E1125 14:48:53.021696 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44\": container with ID starting with c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44 not found: ID does not exist" containerID="c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44" Nov 25 14:48:53 crc 
kubenswrapper[4879]: I1125 14:48:53.021759 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44"} err="failed to get container status \"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44\": rpc error: code = NotFound desc = could not find container \"c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44\": container with ID starting with c1aff85da5e5094319de1983170662e9ade2eb93b0c4d236017d977bf70a5d44 not found: ID does not exist" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.021796 4879 scope.go:117] "RemoveContainer" containerID="534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc" Nov 25 14:48:53 crc kubenswrapper[4879]: E1125 14:48:53.022219 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc\": container with ID starting with 534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc not found: ID does not exist" containerID="534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.022261 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc"} err="failed to get container status \"534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc\": rpc error: code = NotFound desc = could not find container \"534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc\": container with ID starting with 534a5892508d4838953ca1d8ac0889873df465a88b5c03d99f607fc18fb9d9fc not found: ID does not exist" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.022284 4879 scope.go:117] "RemoveContainer" containerID="646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab" Nov 25 14:48:53 crc kubenswrapper[4879]: E1125 14:48:53.023047 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab\": container with ID starting with 646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab not found: ID does not exist" containerID="646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.023089 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab"} err="failed to get container status \"646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab\": rpc error: code = NotFound desc = could not find container \"646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab\": container with ID starting with 646fb4219614c326cb8859e77d65fd1f5eb9e16675a486c4c6b862a53e461fab not found: ID does not exist" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.657395 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" path="/var/lib/kubelet/pods/3d1a00f0-c95b-41f1-b520-38778d9e9fb5/volumes" Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.945352 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bstpg" 
event={"ID":"ed169ce2-81b3-4579-8f37-f45052a7b15d","Type":"ContainerStarted","Data":"d803fecf629b7d3ed726ab476ece719d4ac8a1643c196150048521897f49ce98"} Nov 25 14:48:53 crc kubenswrapper[4879]: I1125 14:48:53.977184 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-bstpg" podStartSLOduration=2.699843264 podStartE2EDuration="1m50.977161474s" podCreationTimestamp="2025-11-25 14:47:03 +0000 UTC" firstStartedPulling="2025-11-25 14:47:04.831703158 +0000 UTC m=+1316.435116229" lastFinishedPulling="2025-11-25 14:48:53.109021368 +0000 UTC m=+1424.712434439" observedRunningTime="2025-11-25 14:48:53.967497228 +0000 UTC m=+1425.570910299" watchObservedRunningTime="2025-11-25 14:48:53.977161474 +0000 UTC m=+1425.580574555" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.075723 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-7s5gb" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.227670 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4xcz\" (UniqueName: \"kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz\") pod \"b010f021-2ab3-424f-910a-68f969e93561\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.227746 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle\") pod \"b010f021-2ab3-424f-910a-68f969e93561\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.227802 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data\") pod \"b010f021-2ab3-424f-910a-68f969e93561\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.227828 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data\") pod \"b010f021-2ab3-424f-910a-68f969e93561\" (UID: \"b010f021-2ab3-424f-910a-68f969e93561\") " Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.235166 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b010f021-2ab3-424f-910a-68f969e93561" (UID: "b010f021-2ab3-424f-910a-68f969e93561"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.236651 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz" (OuterVolumeSpecName: "kube-api-access-s4xcz") pod "b010f021-2ab3-424f-910a-68f969e93561" (UID: "b010f021-2ab3-424f-910a-68f969e93561"). InnerVolumeSpecName "kube-api-access-s4xcz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.253093 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b010f021-2ab3-424f-910a-68f969e93561" (UID: "b010f021-2ab3-424f-910a-68f969e93561"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.275072 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data" (OuterVolumeSpecName: "config-data") pod "b010f021-2ab3-424f-910a-68f969e93561" (UID: "b010f021-2ab3-424f-910a-68f969e93561"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.330081 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4xcz\" (UniqueName: \"kubernetes.io/projected/b010f021-2ab3-424f-910a-68f969e93561-kube-api-access-s4xcz\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.330178 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.330191 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.330201 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b010f021-2ab3-424f-910a-68f969e93561-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.969999 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7s5gb" event={"ID":"b010f021-2ab3-424f-910a-68f969e93561","Type":"ContainerDied","Data":"411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474"} Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.970388 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="411f0902495e95066ca6a18c02a22718945bc4513c479781530bd17ead967474" Nov 25 14:48:55 crc kubenswrapper[4879]: I1125 14:48:55.970076 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-7s5gb" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.499870 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:48:56 crc kubenswrapper[4879]: E1125 14:48:56.500342 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="extract-content" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500361 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="extract-content" Nov 25 14:48:56 crc kubenswrapper[4879]: E1125 14:48:56.500386 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="extract-utilities" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500393 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="extract-utilities" Nov 25 14:48:56 crc kubenswrapper[4879]: E1125 14:48:56.500408 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b010f021-2ab3-424f-910a-68f969e93561" containerName="glance-db-sync" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500415 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b010f021-2ab3-424f-910a-68f969e93561" containerName="glance-db-sync" Nov 25 14:48:56 crc kubenswrapper[4879]: E1125 14:48:56.500427 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="registry-server" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500434 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="registry-server" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500626 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1a00f0-c95b-41f1-b520-38778d9e9fb5" containerName="registry-server" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.500640 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b010f021-2ab3-424f-910a-68f969e93561" containerName="glance-db-sync" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.501485 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.519685 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656249 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656305 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656371 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656394 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656418 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.656442 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t82px\" (UniqueName: \"kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.757953 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.758009 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t82px\" (UniqueName: \"kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.758139 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.758157 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.758183 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.758203 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.759025 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.759581 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.760349 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.760446 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.760621 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.790903 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t82px\" (UniqueName: 
\"kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px\") pod \"dnsmasq-dns-56df8fb6b7-gzfqq\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:56 crc kubenswrapper[4879]: I1125 14:48:56.823606 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.399313 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.401350 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.405932 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-qssh2" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.408644 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.408865 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.426989 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:48:57 crc kubenswrapper[4879]: W1125 14:48:57.453528 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9da17217_b666_43b4_8061_84cdd4cedeac.slice/crio-f3b991d5b35d7f8aa9f550f5fd03581f9d736b865e7df3060ec18a5895e45bbd WatchSource:0}: Error finding container f3b991d5b35d7f8aa9f550f5fd03581f9d736b865e7df3060ec18a5895e45bbd: Status 404 returned error can't find the container with id f3b991d5b35d7f8aa9f550f5fd03581f9d736b865e7df3060ec18a5895e45bbd Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.472259 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.473171 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.473215 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.473263 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.474206 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t55sh\" (UniqueName: 
\"kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.474289 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.474449 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.474549 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576179 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576257 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576301 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576353 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t55sh\" (UniqueName: \"kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576387 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576447 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576490 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.576880 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.578296 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.578357 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.581660 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.582534 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.584154 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.597772 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t55sh\" (UniqueName: \"kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh\") pod \"glance-default-external-api-0\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.609190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: 
\"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.677210 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.679819 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.683933 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.712882 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.737174 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.880736 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.880794 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.880814 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.881166 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.881280 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnk5k\" (UniqueName: \"kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.881317 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.881435 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982750 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982840 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982879 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982899 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982935 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982971 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnk5k\" (UniqueName: \"kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.982995 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.984106 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.985502 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.986624 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.990733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.990883 4879 generic.go:334] "Generic (PLEG): container finished" podID="9da17217-b666-43b4-8061-84cdd4cedeac" containerID="ddd11c35fb60cfa566240ed27ace139dbeccacdd9b30b3ea18f885624d4e09ee" exitCode=0 Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.990906 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" event={"ID":"9da17217-b666-43b4-8061-84cdd4cedeac","Type":"ContainerDied","Data":"ddd11c35fb60cfa566240ed27ace139dbeccacdd9b30b3ea18f885624d4e09ee"} Nov 25 14:48:57 crc kubenswrapper[4879]: I1125 14:48:57.991029 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" event={"ID":"9da17217-b666-43b4-8061-84cdd4cedeac","Type":"ContainerStarted","Data":"f3b991d5b35d7f8aa9f550f5fd03581f9d736b865e7df3060ec18a5895e45bbd"} Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:57.997887 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zlv8f" event={"ID":"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e","Type":"ContainerStarted","Data":"91d3c832a6ed865c0a273e270acf54ffd2d958b4f067f40712a077d4845de570"} Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.000805 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.007701 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnk5k\" (UniqueName: \"kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.015721 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.039014 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zlv8f" podStartSLOduration=2.951876766 podStartE2EDuration="1m55.038961899s" podCreationTimestamp="2025-11-25 14:47:03 +0000 UTC" firstStartedPulling="2025-11-25 
14:47:05.045268571 +0000 UTC m=+1316.648681642" lastFinishedPulling="2025-11-25 14:48:57.132353704 +0000 UTC m=+1428.735766775" observedRunningTime="2025-11-25 14:48:58.03356542 +0000 UTC m=+1429.636978511" watchObservedRunningTime="2025-11-25 14:48:58.038961899 +0000 UTC m=+1429.642374970" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.054255 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.066787 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.293175 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:48:58 crc kubenswrapper[4879]: W1125 14:48:58.309744 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbedfe9b0_4b53_491c_aa21_b7a72adadff4.slice/crio-199546dfc73ca8025a4e099f7b74a8c7d81a8c4b0b0c1b638c2b859716cdc608 WatchSource:0}: Error finding container 199546dfc73ca8025a4e099f7b74a8c7d81a8c4b0b0c1b638c2b859716cdc608: Status 404 returned error can't find the container with id 199546dfc73ca8025a4e099f7b74a8c7d81a8c4b0b0c1b638c2b859716cdc608 Nov 25 14:48:58 crc kubenswrapper[4879]: I1125 14:48:58.733681 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.015268 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerStarted","Data":"199546dfc73ca8025a4e099f7b74a8c7d81a8c4b0b0c1b638c2b859716cdc608"} Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.018222 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerStarted","Data":"85f87637700726e95fa04e2d5e33ce163738f71f531f8926e71aceae7254764b"} Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.022069 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" event={"ID":"9da17217-b666-43b4-8061-84cdd4cedeac","Type":"ContainerStarted","Data":"fd2926ff8937b1c81e60f15d2f7dd9934446409095fca0316db0a6cc9bd87c41"} Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.022435 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.051071 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" podStartSLOduration=3.05104912 podStartE2EDuration="3.05104912s" podCreationTimestamp="2025-11-25 14:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:48:59.049061356 +0000 UTC m=+1430.652474427" watchObservedRunningTime="2025-11-25 14:48:59.05104912 +0000 UTC m=+1430.654462191" Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.121158 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-external-api-0"] Nov 25 14:48:59 crc kubenswrapper[4879]: I1125 14:48:59.224386 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.049325 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerStarted","Data":"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b"} Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.049667 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerStarted","Data":"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169"} Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.049397 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-log" containerID="cri-o://f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" gracePeriod=30 Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.049838 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-httpd" containerID="cri-o://e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" gracePeriod=30 Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.058605 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerStarted","Data":"801e473a6f2214554616ef9812793067555ccbc0370ec7d91d501e91c62d95a5"} Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.079838 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.085752 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.085729953 podStartE2EDuration="4.085729953s" podCreationTimestamp="2025-11-25 14:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:00.075939954 +0000 UTC m=+1431.679353045" watchObservedRunningTime="2025-11-25 14:49:00.085729953 +0000 UTC m=+1431.689143024" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.275469 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.277205 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.278607 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.279070 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.279098 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-45gtr" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.296492 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.446768 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hklf2\" (UniqueName: \"kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.447644 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.447826 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.447906 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.549985 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.550030 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.550201 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hklf2\" (UniqueName: \"kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.550222 4879 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.553944 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.558341 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.559181 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.574693 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hklf2\" (UniqueName: \"kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2\") pod \"openstackclient\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.597009 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 14:49:00 crc kubenswrapper[4879]: I1125 14:49:00.965810 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.070078 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerStarted","Data":"b915d11f38df2247845034f72a0be6b90c3c1107c5cf5482628832aa33892692"} Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.070229 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-httpd" containerID="cri-o://b915d11f38df2247845034f72a0be6b90c3c1107c5cf5482628832aa33892692" gracePeriod=30 Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.070207 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-log" containerID="cri-o://801e473a6f2214554616ef9812793067555ccbc0370ec7d91d501e91c62d95a5" gracePeriod=30 Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074328 4879 generic.go:334] "Generic (PLEG): container finished" podID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerID="e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" exitCode=143 Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074362 4879 generic.go:334] "Generic (PLEG): container finished" podID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerID="f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" exitCode=143 Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074388 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerDied","Data":"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b"} Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074422 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerDied","Data":"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169"} Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074436 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bedfe9b0-4b53-491c-aa21-b7a72adadff4","Type":"ContainerDied","Data":"199546dfc73ca8025a4e099f7b74a8c7d81a8c4b0b0c1b638c2b859716cdc608"} Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074454 4879 scope.go:117] "RemoveContainer" containerID="e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.074505 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.093883 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.093864326 podStartE2EDuration="5.093864326s" podCreationTimestamp="2025-11-25 14:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:01.09181802 +0000 UTC m=+1432.695231091" watchObservedRunningTime="2025-11-25 14:49:01.093864326 +0000 UTC m=+1432.697277397" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169252 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169331 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169364 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169469 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169649 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169714 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.169734 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t55sh\" (UniqueName: \"kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh\") pod \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\" (UID: \"bedfe9b0-4b53-491c-aa21-b7a72adadff4\") " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.180323 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh" (OuterVolumeSpecName: "kube-api-access-t55sh") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "kube-api-access-t55sh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.186624 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.272724 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.272761 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t55sh\" (UniqueName: \"kubernetes.io/projected/bedfe9b0-4b53-491c-aa21-b7a72adadff4-kube-api-access-t55sh\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.292495 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.296109 4879 scope.go:117] "RemoveContainer" containerID="f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.341239 4879 scope.go:117] "RemoveContainer" containerID="e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" Nov 25 14:49:01 crc kubenswrapper[4879]: E1125 14:49:01.341647 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b\": container with ID starting with e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b not found: ID does not exist" containerID="e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.341674 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b"} err="failed to get container status \"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b\": rpc error: code = NotFound desc = could not find container \"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b\": container with ID starting with e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b not found: ID does not exist" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.341772 4879 scope.go:117] "RemoveContainer" containerID="f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" Nov 25 14:49:01 crc kubenswrapper[4879]: E1125 14:49:01.342353 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169\": container with ID starting with f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169 not found: ID does not exist" containerID="f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.342378 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169"} err="failed to get container 
status \"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169\": rpc error: code = NotFound desc = could not find container \"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169\": container with ID starting with f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169 not found: ID does not exist" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.342397 4879 scope.go:117] "RemoveContainer" containerID="e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.342692 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b"} err="failed to get container status \"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b\": rpc error: code = NotFound desc = could not find container \"e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b\": container with ID starting with e1fb26c04acd6ca588e142a88002a0e65440fa9ff59b91a1fe1aa64096b2955b not found: ID does not exist" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.342709 4879 scope.go:117] "RemoveContainer" containerID="f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.342911 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169"} err="failed to get container status \"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169\": rpc error: code = NotFound desc = could not find container \"f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169\": container with ID starting with f4c66a4c8fabc5f16f051651469781c1c93c180c9cb83daed7900425dca0a169 not found: ID does not exist" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.345586 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.345621 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs" (OuterVolumeSpecName: "logs") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.347104 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts" (OuterVolumeSpecName: "scripts") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.347158 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.347241 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data" (OuterVolumeSpecName: "config-data") pod "bedfe9b0-4b53-491c-aa21-b7a72adadff4" (UID: "bedfe9b0-4b53-491c-aa21-b7a72adadff4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374262 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374295 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374305 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bedfe9b0-4b53-491c-aa21-b7a72adadff4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374318 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374326 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.374334 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bedfe9b0-4b53-491c-aa21-b7a72adadff4-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.380269 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 14:49:01 crc kubenswrapper[4879]: W1125 14:49:01.389420 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode86acc2c_393a_40eb_b8bb_1cda1ef3c298.slice/crio-fa3e88cc66efee697e9e92ddf8d9485182ba9148ab3207fbae1b4731a45990b1 WatchSource:0}: Error finding container fa3e88cc66efee697e9e92ddf8d9485182ba9148ab3207fbae1b4731a45990b1: Status 404 returned error can't find the container with id fa3e88cc66efee697e9e92ddf8d9485182ba9148ab3207fbae1b4731a45990b1 Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.409316 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.416711 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.424088 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:49:01 crc kubenswrapper[4879]: E1125 14:49:01.424656 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-log" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.424724 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-log" Nov 25 14:49:01 crc kubenswrapper[4879]: E1125 14:49:01.424783 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-httpd" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.424844 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-httpd" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.425105 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-httpd" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.425223 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" containerName="glance-log" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.426198 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.430960 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.431225 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.453249 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576635 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576725 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576753 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576781 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576805 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtqmr\" (UniqueName: \"kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " 
pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576845 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576899 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.576928 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.656787 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bedfe9b0-4b53-491c-aa21-b7a72adadff4" path="/var/lib/kubelet/pods/bedfe9b0-4b53-491c-aa21-b7a72adadff4/volumes" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.678241 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.678327 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.678362 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.678391 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.678606 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.679339 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.680285 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtqmr\" (UniqueName: \"kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.680412 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.680919 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.681391 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.682071 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.683486 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.685678 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.686378 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.693216 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.697829 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtqmr\" (UniqueName: \"kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.713536 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " pod="openstack/glance-default-external-api-0" Nov 25 14:49:01 crc kubenswrapper[4879]: I1125 14:49:01.752161 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.085692 4879 generic.go:334] "Generic (PLEG): container finished" podID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerID="b915d11f38df2247845034f72a0be6b90c3c1107c5cf5482628832aa33892692" exitCode=0 Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.085958 4879 generic.go:334] "Generic (PLEG): container finished" podID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerID="801e473a6f2214554616ef9812793067555ccbc0370ec7d91d501e91c62d95a5" exitCode=143 Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.085770 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerDied","Data":"b915d11f38df2247845034f72a0be6b90c3c1107c5cf5482628832aa33892692"} Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.086031 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerDied","Data":"801e473a6f2214554616ef9812793067555ccbc0370ec7d91d501e91c62d95a5"} Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.087545 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e86acc2c-393a-40eb-b8bb-1cda1ef3c298","Type":"ContainerStarted","Data":"fa3e88cc66efee697e9e92ddf8d9485182ba9148ab3207fbae1b4731a45990b1"} Nov 25 14:49:02 crc kubenswrapper[4879]: I1125 14:49:02.318390 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.098822 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerStarted","Data":"8752de461733c913b130b4f5ca752cd2a79ae1df38c85c652ff49f70679c1551"} Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.468089 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614287 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614344 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614361 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614415 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614504 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614522 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnk5k\" (UniqueName: \"kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.614542 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\" (UID: \"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a\") " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.616149 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs" (OuterVolumeSpecName: "logs") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.616235 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.618695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.619410 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts" (OuterVolumeSpecName: "scripts") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.620336 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k" (OuterVolumeSpecName: "kube-api-access-hnk5k") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "kube-api-access-hnk5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.644595 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.683775 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data" (OuterVolumeSpecName: "config-data") pod "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" (UID: "2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716827 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716874 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716885 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716897 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716906 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716917 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnk5k\" (UniqueName: \"kubernetes.io/projected/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a-kube-api-access-hnk5k\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.716965 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.759377 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.821098 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.883612 4879 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.884062 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-httpd" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.884084 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-httpd" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.884100 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-log" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.884109 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-log" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.884359 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-log" Nov 25 14:49:03 crc kubenswrapper[4879]: 
I1125 14:49:03.884379 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" containerName="glance-httpd" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885026 4879 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885271 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885434 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55" gracePeriod=15 Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885470 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd" gracePeriod=15 Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885575 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a" gracePeriod=15 Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885577 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09" gracePeriod=15 Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885619 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d" gracePeriod=15 Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.885960 4879 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886427 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886446 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886482 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886493 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886518 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886528 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886539 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886546 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886566 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886574 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886592 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886599 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.886609 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886617 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886851 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886882 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886895 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886906 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886916 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.886927 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 14:49:03 crc kubenswrapper[4879]: E1125 14:49:03.887189 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.887202 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.887402 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 14:49:03 crc kubenswrapper[4879]: I1125 14:49:03.937652 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024287 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024348 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024377 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024455 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024480 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024689 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024787 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.024903 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.115840 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a","Type":"ContainerDied","Data":"85f87637700726e95fa04e2d5e33ce163738f71f531f8926e71aceae7254764b"} Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.115911 4879 scope.go:117] "RemoveContainer" containerID="b915d11f38df2247845034f72a0be6b90c3c1107c5cf5482628832aa33892692" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.116087 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.117087 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.117727 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.118101 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.119057 4879 generic.go:334] "Generic (PLEG): container finished" podID="211ea349-e87d-41a4-aa0e-e07b5cd25946" containerID="1109de26864c0e4e71e7a66146bc7c09399b59c794ad54e48ec6374d8352ffc9" exitCode=0 Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.119141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"211ea349-e87d-41a4-aa0e-e07b5cd25946","Type":"ContainerDied","Data":"1109de26864c0e4e71e7a66146bc7c09399b59c794ad54e48ec6374d8352ffc9"} Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.120348 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.120745 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.121099 4879 status_manager.go:851] "Failed to get status for pod" 
podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.121306 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.125294 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126340 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126402 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126463 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126482 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 
14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126499 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126484 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126505 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126662 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126855 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.126911 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.127010 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.127088 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.127250 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.128078 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09" exitCode=0 Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.128109 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd" exitCode=0 Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.128138 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d" exitCode=0 Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.128152 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a" exitCode=2 Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.134459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerStarted","Data":"7577298483f3d806f50edb174d215875489985db1478c5c365e6375cc6cc5ed0"} Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.134512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerStarted","Data":"1e6667397c097692100efe4ff452f53f8325243348eea48e838c523168da37a7"} Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.135781 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.136341 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.136750 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.138927 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.139385 4879 status_manager.go:851] "Failed to get status 
for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.140008 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.140410 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.140700 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.141010 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.141431 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:04 crc kubenswrapper[4879]: E1125 14:49:04.150808 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.168189 4879 scope.go:117] "RemoveContainer" containerID="801e473a6f2214554616ef9812793067555ccbc0370ec7d91d501e91c62d95a5" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.189556 4879 scope.go:117] "RemoveContainer" containerID="2c80989fb2d4ee04c3111f245b158895b79030a908fc482a4668a11bf10d86e6" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.221384 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:04 crc kubenswrapper[4879]: W1125 14:49:04.275961 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-f9601b95fcde19e4146e0e48c1d172c7a441be10bc1c91737d9f5bae10fc3f56 WatchSource:0}: Error finding container f9601b95fcde19e4146e0e48c1d172c7a441be10bc1c91737d9f5bae10fc3f56: Status 404 returned error can't find the container with id f9601b95fcde19e4146e0e48c1d172c7a441be10bc1c91737d9f5bae10fc3f56 Nov 25 14:49:04 crc kubenswrapper[4879]: E1125 14:49:04.278553 4879 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187b475d1c4cbcf4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 14:49:04.278002932 +0000 UTC m=+1435.881416003,LastTimestamp:2025-11-25 14:49:04.278002932 +0000 UTC m=+1435.881416003,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.831735 4879 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Nov 25 14:49:04 crc kubenswrapper[4879]: I1125 14:49:04.832137 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.152497 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4"} Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.152554 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f9601b95fcde19e4146e0e48c1d172c7a441be10bc1c91737d9f5bae10fc3f56"} Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.153357 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 
38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.153776 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.154051 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.154354 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.155191 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.160348 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.178479 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.388637 4879 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.388862 4879 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.389153 4879 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.389638 4879 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 
crc kubenswrapper[4879]: E1125 14:49:05.390055 4879 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.390083 4879 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.390391 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="200ms" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.494190 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.494828 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.495359 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.495847 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.496162 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.496737 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.591647 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="400ms" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.652679 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock\") pod 
\"211ea349-e87d-41a4-aa0e-e07b5cd25946\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.652733 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir\") pod \"211ea349-e87d-41a4-aa0e-e07b5cd25946\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.652798 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access\") pod \"211ea349-e87d-41a4-aa0e-e07b5cd25946\" (UID: \"211ea349-e87d-41a4-aa0e-e07b5cd25946\") " Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.652871 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock" (OuterVolumeSpecName: "var-lock") pod "211ea349-e87d-41a4-aa0e-e07b5cd25946" (UID: "211ea349-e87d-41a4-aa0e-e07b5cd25946"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.652909 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "211ea349-e87d-41a4-aa0e-e07b5cd25946" (UID: "211ea349-e87d-41a4-aa0e-e07b5cd25946"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.653277 4879 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.653301 4879 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/211ea349-e87d-41a4-aa0e-e07b5cd25946-var-lock\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.658778 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "211ea349-e87d-41a4-aa0e-e07b5cd25946" (UID: "211ea349-e87d-41a4-aa0e-e07b5cd25946"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:05 crc kubenswrapper[4879]: I1125 14:49:05.755187 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/211ea349-e87d-41a4-aa0e-e07b5cd25946-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:05 crc kubenswrapper[4879]: E1125 14:49:05.993684 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="800ms" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.189225 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.189547 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"211ea349-e87d-41a4-aa0e-e07b5cd25946","Type":"ContainerDied","Data":"c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4"} Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.189584 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1c607d6dda4f3f4a7c9021b5c36e9cd186b318dfc56941017d82677d5de16f4" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.198254 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.198753 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.199272 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.199884 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.586273 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.587272 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.588108 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.588416 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.588634 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.588885 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.589154 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.681627 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.681871 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.681930 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.682673 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.682736 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.682758 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.784036 4879 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.784094 4879 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.784103 4879 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:06 crc kubenswrapper[4879]: E1125 14:49:06.794731 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="1.6s" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.826386 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.827351 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.827697 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.828159 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.829276 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" 
pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.829916 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:06 crc kubenswrapper[4879]: I1125 14:49:06.830325 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.201452 4879 generic.go:334] "Generic (PLEG): container finished" podID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" containerID="91d3c832a6ed865c0a273e270acf54ffd2d958b4f067f40712a077d4845de570" exitCode=0 Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.201535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zlv8f" event={"ID":"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e","Type":"ContainerDied","Data":"91d3c832a6ed865c0a273e270acf54ffd2d958b4f067f40712a077d4845de570"} Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.202196 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.202441 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.202659 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.202888 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.203112 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.203402 
4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.203640 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.206293 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.208188 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55" exitCode=0 Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.208268 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.208320 4879 scope.go:117] "RemoveContainer" containerID="3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.216673 4879 generic.go:334] "Generic (PLEG): container finished" podID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" containerID="4e612833ae0b26c1698789035216d1a5fc8ea3d9745279c425d6aa0e79629e25" exitCode=0 Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.216724 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-b29mr" event={"ID":"41a3bd8d-a2e0-401e-b2f6-10f076e3710e","Type":"ContainerDied","Data":"4e612833ae0b26c1698789035216d1a5fc8ea3d9745279c425d6aa0e79629e25"} Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.218945 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.219303 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.219658 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.220548 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.220786 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.221044 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.221941 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.222247 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.234069 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.234855 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.235324 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.235593 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.236181 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 
25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.236545 4879 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.236951 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.237449 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:07 crc kubenswrapper[4879]: I1125 14:49:07.658880 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Nov 25 14:49:08 crc kubenswrapper[4879]: E1125 14:49:08.396363 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="3.2s" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.651088 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.651852 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.652088 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.652301 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.652494 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.653074 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.653970 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:09 crc kubenswrapper[4879]: I1125 14:49:09.654345 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:10 crc kubenswrapper[4879]: I1125 14:49:10.910578 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.214669 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.215588 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.216002 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.216449 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.216747 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.216974 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.217320 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.217625 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.217905 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.218173 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.265011 4879 generic.go:334] "Generic (PLEG): container finished" podID="ed169ce2-81b3-4579-8f37-f45052a7b15d" containerID="d803fecf629b7d3ed726ab476ece719d4ac8a1643c196150048521897f49ce98" exitCode=0 Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.265048 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bstpg" event={"ID":"ed169ce2-81b3-4579-8f37-f45052a7b15d","Type":"ContainerDied","Data":"d803fecf629b7d3ed726ab476ece719d4ac8a1643c196150048521897f49ce98"} Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.265674 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.265909 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.266109 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.266330 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.266530 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.266736 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.266935 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.267193 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.267392 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.267597 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.597435 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="6.4s" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.619022 4879 scope.go:117] "RemoveContainer" containerID="ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.735772 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zlv8f" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.736574 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.736763 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.736926 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737104 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737330 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737496 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737642 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737798 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.737965 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.738248 4879 status_manager.go:851] "Failed to get status for 
pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.743101 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-b29mr" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.743729 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.744022 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.744267 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.744457 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.744642 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.744821 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.745194 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.745381 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.745578 4879 status_manager.go:851] 
"Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.745946 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.752482 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.752534 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.755869 4879 scope.go:117] "RemoveContainer" containerID="b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.778226 4879 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.190:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187b475d1c4cbcf4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 14:49:04.278002932 +0000 UTC m=+1435.881416003,LastTimestamp:2025-11-25 14:49:04.278002932 +0000 UTC m=+1435.881416003,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.781717 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.782245 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.782472 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.782677 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.782863 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783024 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783210 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783405 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783558 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783739 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.783914 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.794229 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.794849 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.795263 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" 
pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.795833 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.796110 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.796354 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.796548 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.796796 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.797114 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.797476 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.797788 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.810315 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.841408 4879 scope.go:117] "RemoveContainer" containerID="d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.861387 4879 scope.go:117] "RemoveContainer" containerID="dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872466 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle\") pod \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872585 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs\") pod \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts\") pod \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872771 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data\") pod \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872813 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lgxv\" (UniqueName: \"kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv\") pod \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\" (UID: \"41a3bd8d-a2e0-401e-b2f6-10f076e3710e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872842 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data\") pod \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle\") pod \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.872909 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5h62\" (UniqueName: \"kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62\") pod \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\" (UID: \"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e\") " Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.874237 4879 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs" (OuterVolumeSpecName: "logs") pod "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" (UID: "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.878470 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62" (OuterVolumeSpecName: "kube-api-access-w5h62") pod "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" (UID: "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e"). InnerVolumeSpecName "kube-api-access-w5h62". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.878980 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts" (OuterVolumeSpecName: "scripts") pod "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" (UID: "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.879437 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "41a3bd8d-a2e0-401e-b2f6-10f076e3710e" (UID: "41a3bd8d-a2e0-401e-b2f6-10f076e3710e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.880529 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv" (OuterVolumeSpecName: "kube-api-access-6lgxv") pod "41a3bd8d-a2e0-401e-b2f6-10f076e3710e" (UID: "41a3bd8d-a2e0-401e-b2f6-10f076e3710e"). InnerVolumeSpecName "kube-api-access-6lgxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.882085 4879 scope.go:117] "RemoveContainer" containerID="100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.907768 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41a3bd8d-a2e0-401e-b2f6-10f076e3710e" (UID: "41a3bd8d-a2e0-401e-b2f6-10f076e3710e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.912204 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" (UID: "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.912577 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data" (OuterVolumeSpecName: "config-data") pod "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" (UID: "b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975006 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975051 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5h62\" (UniqueName: \"kubernetes.io/projected/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-kube-api-access-w5h62\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975062 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975074 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975086 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975098 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975105 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lgxv\" (UniqueName: \"kubernetes.io/projected/41a3bd8d-a2e0-401e-b2f6-10f076e3710e-kube-api-access-6lgxv\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.975113 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.992879 4879 scope.go:117] "RemoveContainer" containerID="3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.993780 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\": container with ID starting with 3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09 not found: ID does not exist" containerID="3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.993817 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09"} err="failed to get container status \"3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\": rpc error: code = NotFound desc = could not find container \"3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09\": container with ID starting with 3ff865758933a5d6b4bd6c408da55c3498b547c620ed64ac88a2e4219f1d1d09 not found: ID does not exist" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.993843 4879 scope.go:117] 
"RemoveContainer" containerID="ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.994216 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\": container with ID starting with ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd not found: ID does not exist" containerID="ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994242 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd"} err="failed to get container status \"ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\": rpc error: code = NotFound desc = could not find container \"ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd\": container with ID starting with ce74a26d6f045f07410f0f6f0518f86988c64f925bd743b5f308ae90976db5fd not found: ID does not exist" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994260 4879 scope.go:117] "RemoveContainer" containerID="b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.994530 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\": container with ID starting with b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d not found: ID does not exist" containerID="b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994566 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d"} err="failed to get container status \"b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\": rpc error: code = NotFound desc = could not find container \"b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d\": container with ID starting with b18aa3bbe9afacd5e4c22f5e978bb39177381c80daf9dcd97fba40433a36364d not found: ID does not exist" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994590 4879 scope.go:117] "RemoveContainer" containerID="d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.994892 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\": container with ID starting with d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a not found: ID does not exist" containerID="d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994926 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a"} err="failed to get container status \"d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\": rpc error: code = NotFound desc = could not find container \"d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a\": container with ID starting with 
d796f6a100d4b561101f9f2a95c7fed02253e77342462173c3454cc5e385be2a not found: ID does not exist" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.994945 4879 scope.go:117] "RemoveContainer" containerID="dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.995272 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\": container with ID starting with dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55 not found: ID does not exist" containerID="dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.995302 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55"} err="failed to get container status \"dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\": rpc error: code = NotFound desc = could not find container \"dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55\": container with ID starting with dd1a56fc4e9ecd87f210d4a9c4736514468fe1360ce8e1ecf0095e5989d05d55 not found: ID does not exist" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.995324 4879 scope.go:117] "RemoveContainer" containerID="100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480" Nov 25 14:49:11 crc kubenswrapper[4879]: E1125 14:49:11.995708 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\": container with ID starting with 100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480 not found: ID does not exist" containerID="100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480" Nov 25 14:49:11 crc kubenswrapper[4879]: I1125 14:49:11.995736 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480"} err="failed to get container status \"100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\": rpc error: code = NotFound desc = could not find container \"100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480\": container with ID starting with 100c56be45b36b16f7677c4ffc227c38d4f4c2add2fc498c2f54095cc758c480 not found: ID does not exist" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.274741 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zlv8f" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.274748 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zlv8f" event={"ID":"b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e","Type":"ContainerDied","Data":"72bd3c883061d48e5af00eb66a5bd1ac4d57dbd3d7312a7d0dcd93e1bdce046a"} Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.274818 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72bd3c883061d48e5af00eb66a5bd1ac4d57dbd3d7312a7d0dcd93e1bdce046a" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.278603 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-b29mr" event={"ID":"41a3bd8d-a2e0-401e-b2f6-10f076e3710e","Type":"ContainerDied","Data":"6cb96ef7b4fd0a6caa5960b7389eb2eec5a09630c40d3dabded933bb4b914fbd"} Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.278659 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cb96ef7b4fd0a6caa5960b7389eb2eec5a09630c40d3dabded933bb4b914fbd" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.278809 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-b29mr" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.279185 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.279268 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.298075 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.299029 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.299425 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.299683 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.299886 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 
crc kubenswrapper[4879]: I1125 14:49:12.300088 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.300324 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.300564 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.300783 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.301175 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.302115 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.302587 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.302792 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.302961 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.303113 4879 status_manager.go:851] "Failed to get status for pod" 
podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.303348 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.303575 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.303799 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.304021 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.304222 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: E1125 14:49:12.319562 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.560993 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bstpg" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.562249 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.562741 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.563463 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.564030 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.564418 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.564686 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.565089 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.565444 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.565688 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.566039 4879 status_manager.go:851] 
"Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.687462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.687956 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.687994 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.688150 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwvs7\" (UniqueName: \"kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.688212 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.688268 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data\") pod \"ed169ce2-81b3-4579-8f37-f45052a7b15d\" (UID: \"ed169ce2-81b3-4579-8f37-f45052a7b15d\") " Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.688377 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.688910 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed169ce2-81b3-4579-8f37-f45052a7b15d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.694243 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.694774 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts" (OuterVolumeSpecName: "scripts") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.694912 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7" (OuterVolumeSpecName: "kube-api-access-jwvs7") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "kube-api-access-jwvs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.716870 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.741705 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data" (OuterVolumeSpecName: "config-data") pod "ed169ce2-81b3-4579-8f37-f45052a7b15d" (UID: "ed169ce2-81b3-4579-8f37-f45052a7b15d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.790206 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.790566 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.790670 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.790762 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ed169ce2-81b3-4579-8f37-f45052a7b15d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:12 crc kubenswrapper[4879]: I1125 14:49:12.790824 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwvs7\" (UniqueName: \"kubernetes.io/projected/ed169ce2-81b3-4579-8f37-f45052a7b15d-kube-api-access-jwvs7\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.287068 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bstpg" event={"ID":"ed169ce2-81b3-4579-8f37-f45052a7b15d","Type":"ContainerDied","Data":"6ee5ed94ee0f6a2595cca6f49f4272207d1fd99affcbfc8ea4a9b67f5a9718b8"} Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.287110 4879 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="6ee5ed94ee0f6a2595cca6f49f4272207d1fd99affcbfc8ea4a9b67f5a9718b8" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.287417 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bstpg" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.297535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e86acc2c-393a-40eb-b8bb-1cda1ef3c298","Type":"ContainerStarted","Data":"20f8e7a470342784d528dc91bc65940874d5348dbae47cccff1c3f3137264375"} Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.298175 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.298624 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.299610 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.300818 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.301325 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.301597 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.301763 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.301988 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.302251 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.302407 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.302704 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.302855 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.302999 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.303166 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.303316 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.303470 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.303621 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc 
kubenswrapper[4879]: I1125 14:49:13.303761 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.303904 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: I1125 14:49:13.304041 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:13 crc kubenswrapper[4879]: E1125 14:49:13.343696 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.306883 4879 generic.go:334] "Generic (PLEG): container finished" podID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" containerID="9bbe5f244d168f99dcc124a1fc0b095a0b5d75bd93006ace9ce81698b753e2f3" exitCode=1 Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.308046 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerDied","Data":"9bbe5f244d168f99dcc124a1fc0b095a0b5d75bd93006ace9ce81698b753e2f3"} Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.308377 4879 scope.go:117] "RemoveContainer" containerID="9bbe5f244d168f99dcc124a1fc0b095a0b5d75bd93006ace9ce81698b753e2f3" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.309320 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.309557 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.309778 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.309995 4879 status_manager.go:851] "Failed to get status for pod" 
podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.310235 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.310426 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.310641 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.312405 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.312686 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.313068 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.313476 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.439952 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.440442 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.440569 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.440912 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.441255 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.441263 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.441551 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.441797 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.442042 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.442383 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.442718 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.443004 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.443303 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" 
pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.443703 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.444377 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.444635 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.444928 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.445203 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.445444 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.445725 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.445980 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.446287 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.446563 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.446791 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: I1125 14:49:14.447052 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:14 crc kubenswrapper[4879]: E1125 14:49:14.465906 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:15 crc kubenswrapper[4879]: E1125 14:49:15.382082 4879 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack/glance-glance-default-external-api-0: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/persistentvolumeclaims/glance-glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openstack/glance-default-external-api-0" volumeName="glance" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.328399 4879 generic.go:334] "Generic (PLEG): container finished" podID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" containerID="d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81" exitCode=1 Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.328499 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerDied","Data":"d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81"} Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.328956 4879 scope.go:117] "RemoveContainer" containerID="9bbe5f244d168f99dcc124a1fc0b095a0b5d75bd93006ace9ce81698b753e2f3" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.329946 4879 scope.go:117] "RemoveContainer" containerID="d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81" Nov 25 14:49:16 crc kubenswrapper[4879]: E1125 14:49:16.330382 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager 
pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.330587 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.331261 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.331708 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.332088 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.332538 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.332939 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.333333 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.333721 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334078 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334093 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334309 4879 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf" exitCode=1 Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334348 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf"} Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334423 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.334735 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.335037 4879 scope.go:117] "RemoveContainer" containerID="8eef713e68e400b1c1742bc4561314c4771ed8652be02e8939c263c00f73afcf" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.335203 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.335793 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.336289 4879 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.336559 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.336782 
4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.337014 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.337827 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.338085 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.338308 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.338519 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.338733 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.338983 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.644389 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.645254 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.645733 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.646091 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.646412 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.646664 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.647316 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.647849 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.648172 4879 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.648404 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc 
kubenswrapper[4879]: I1125 14:49:16.648605 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.648794 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.649012 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.662062 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.662095 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:16 crc kubenswrapper[4879]: E1125 14:49:16.662512 4879 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:16 crc kubenswrapper[4879]: I1125 14:49:16.663160 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:16 crc kubenswrapper[4879]: W1125 14:49:16.699921 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-709c556a74d6e3d9e3c1ad150927df0939713119a4a2d6f8864239ac2e20c002 WatchSource:0}: Error finding container 709c556a74d6e3d9e3c1ad150927df0939713119a4a2d6f8864239ac2e20c002: Status 404 returned error can't find the container with id 709c556a74d6e3d9e3c1ad150927df0939713119a4a2d6f8864239ac2e20c002 Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.365864 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.366219 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8048b811d5a02974901c60f574a6fb27d31bfd5bb78354b0021e368827182d69"} Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.367136 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.367577 4879 status_manager.go:851] "Failed to get status for pod" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.368217 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.372910 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.373317 4879 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="960b75d9392339359a47c78ad10aaa11c5081f0617203e731778d32f67d8ce7c" exitCode=0 Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.373519 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.373753 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.373778 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"960b75d9392339359a47c78ad10aaa11c5081f0617203e731778d32f67d8ce7c"} Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.373972 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374179 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"709c556a74d6e3d9e3c1ad150927df0939713119a4a2d6f8864239ac2e20c002"} Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374228 4879 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374252 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374272 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374465 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: E1125 14:49:17.374627 4879 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374688 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.374902 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc 
kubenswrapper[4879]: I1125 14:49:17.375261 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.380540 4879 status_manager.go:851] "Failed to get status for pod" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" pod="openstack/glance-default-internal-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-internal-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.380939 4879 status_manager.go:851] "Failed to get status for pod" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.381428 4879 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.381700 4879 status_manager.go:851] "Failed to get status for pod" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" pod="openstack/cinder-db-sync-bstpg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/cinder-db-sync-bstpg\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.382019 4879 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.382283 4879 status_manager.go:851] "Failed to get status for pod" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" pod="openstack/glance-default-external-api-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/glance-default-external-api-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.382653 4879 status_manager.go:851] "Failed to get status for pod" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" pod="openstack/placement-db-sync-zlv8f" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/placement-db-sync-zlv8f\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.382952 4879 status_manager.go:851] "Failed to get status for pod" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" pod="openstack/ceilometer-0" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/ceilometer-0\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.383352 4879 status_manager.go:851] "Failed to get status for pod" 
podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" pod="openstack/barbican-db-sync-b29mr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/barbican-db-sync-b29mr\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.383609 4879 status_manager.go:851] "Failed to get status for pod" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" pod="openstack/openstackclient" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/openstackclient\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.383967 4879 status_manager.go:851] "Failed to get status for pod" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/metallb-system/pods/metallb-operator-controller-manager-7965d46465-b9w8p\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.384286 4879 status_manager.go:851] "Failed to get status for pod" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack/pods/dnsmasq-dns-56df8fb6b7-gzfqq\": dial tcp 38.102.83.190:6443: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.409279 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.409333 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.409402 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.410013 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:49:17 crc kubenswrapper[4879]: I1125 14:49:17.410081 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd" gracePeriod=600 Nov 25 14:49:17 crc kubenswrapper[4879]: E1125 14:49:17.998328 4879 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.190:6443: connect: connection refused" interval="7s" Nov 25 14:49:18 crc 
kubenswrapper[4879]: I1125 14:49:18.389230 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"debbd96975abbcc43ad2c89201eca4c04d5153d82d7aeff8c93852186bdeab14"} Nov 25 14:49:18 crc kubenswrapper[4879]: I1125 14:49:18.389282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6ced7e5734f66e41ed62ddb0bd404b24a493f81f6f4ed6536b9107d824b98e87"} Nov 25 14:49:18 crc kubenswrapper[4879]: I1125 14:49:18.393634 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd" exitCode=0 Nov 25 14:49:18 crc kubenswrapper[4879]: I1125 14:49:18.393690 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd"} Nov 25 14:49:18 crc kubenswrapper[4879]: I1125 14:49:18.393751 4879 scope.go:117] "RemoveContainer" containerID="47970bca382c877b01ebc504c8bf019e55bd57a204827e1087ac93b9715656b7" Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.289466 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.298518 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.407409 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5"} Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.412827 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c1395567f0e5acbd660e751d38a95d1fb5091a7ba55a3eaab2a64c0a1131081d"} Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.412886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"25709277dfbbef91d360c5b8011ca0d3d40d92a23d6fe233bb747919a45b106c"} Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.412904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c63e09230dc90aa8bfbcfc8144987e23dd3da01b7fb4a6f8fd457735e3c2ee95"} Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.413009 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.413177 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:19 crc kubenswrapper[4879]: I1125 14:49:19.413204 4879 
mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:20 crc kubenswrapper[4879]: I1125 14:49:20.423730 4879 generic.go:334] "Generic (PLEG): container finished" podID="284aa011-0c93-49d5-a07e-4580b44f1cdc" containerID="a7831c10e52dea800b5ac0b645f0ad3ee3a1384503fb815d37c65ab355043571" exitCode=0 Nov 25 14:49:20 crc kubenswrapper[4879]: I1125 14:49:20.424693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-78w72" event={"ID":"284aa011-0c93-49d5-a07e-4580b44f1cdc","Type":"ContainerDied","Data":"a7831c10e52dea800b5ac0b645f0ad3ee3a1384503fb815d37c65ab355043571"} Nov 25 14:49:20 crc kubenswrapper[4879]: I1125 14:49:20.915611 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:49:21 crc kubenswrapper[4879]: I1125 14:49:21.664149 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:21 crc kubenswrapper[4879]: I1125 14:49:21.664526 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:21 crc kubenswrapper[4879]: I1125 14:49:21.670298 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:21 crc kubenswrapper[4879]: I1125 14:49:21.829850 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-78w72" Nov 25 14:49:22 crc kubenswrapper[4879]: I1125 14:49:22.442617 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-78w72" event={"ID":"284aa011-0c93-49d5-a07e-4580b44f1cdc","Type":"ContainerDied","Data":"2b49eece8a74ec12224e338e5f6f3ff8dd7f5e9d70ac66017c1b002268a7412b"} Nov 25 14:49:22 crc kubenswrapper[4879]: I1125 14:49:22.442966 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b49eece8a74ec12224e338e5f6f3ff8dd7f5e9d70ac66017c1b002268a7412b" Nov 25 14:49:22 crc kubenswrapper[4879]: I1125 14:49:22.443033 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-78w72" Nov 25 14:49:22 crc kubenswrapper[4879]: I1125 14:49:22.600857 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:49:22 crc kubenswrapper[4879]: I1125 14:49:22.601518 4879 scope.go:117] "RemoveContainer" containerID="d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81" Nov 25 14:49:22 crc kubenswrapper[4879]: E1125 14:49:22.601802 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.760222 4879 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.816086 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle\") pod \"284aa011-0c93-49d5-a07e-4580b44f1cdc\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.816841 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config\") pod \"284aa011-0c93-49d5-a07e-4580b44f1cdc\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.816883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46w7z\" (UniqueName: \"kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z\") pod \"284aa011-0c93-49d5-a07e-4580b44f1cdc\" (UID: \"284aa011-0c93-49d5-a07e-4580b44f1cdc\") " Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.842027 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z" (OuterVolumeSpecName: "kube-api-access-46w7z") pod "284aa011-0c93-49d5-a07e-4580b44f1cdc" (UID: "284aa011-0c93-49d5-a07e-4580b44f1cdc"). InnerVolumeSpecName "kube-api-access-46w7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.856786 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config" (OuterVolumeSpecName: "config") pod "284aa011-0c93-49d5-a07e-4580b44f1cdc" (UID: "284aa011-0c93-49d5-a07e-4580b44f1cdc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.870752 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "284aa011-0c93-49d5-a07e-4580b44f1cdc" (UID: "284aa011-0c93-49d5-a07e-4580b44f1cdc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.919781 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.919821 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/284aa011-0c93-49d5-a07e-4580b44f1cdc-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:24 crc kubenswrapper[4879]: I1125 14:49:24.919837 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46w7z\" (UniqueName: \"kubernetes.io/projected/284aa011-0c93-49d5-a07e-4580b44f1cdc-kube-api-access-46w7z\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.045354 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ce1cc68-0dec-4842-b08b-0b7749bcd083" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.468388 4879 generic.go:334] "Generic (PLEG): container finished" podID="89a61837-ab76-494d-a98d-268fed9bbe35" containerID="0b9bd2c2272b98f2159dae8ca1c2bf74fdbdec97908ea08773e05724c87145ea" exitCode=1 Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.469035 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.469055 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.469321 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerDied","Data":"0b9bd2c2272b98f2159dae8ca1c2bf74fdbdec97908ea08773e05724c87145ea"} Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.469972 4879 scope.go:117] "RemoveContainer" containerID="0b9bd2c2272b98f2159dae8ca1c2bf74fdbdec97908ea08773e05724c87145ea" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.470910 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.482835 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:25 crc kubenswrapper[4879]: I1125 14:49:25.508797 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ce1cc68-0dec-4842-b08b-0b7749bcd083" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.480213 4879 generic.go:334] "Generic (PLEG): container finished" podID="89a61837-ab76-494d-a98d-268fed9bbe35" containerID="9c8ba8fc79c4ec42beec4874612f9bd965b28b8bf60579933f658c9c44802114" exitCode=1 Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.480271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" 
event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerDied","Data":"9c8ba8fc79c4ec42beec4874612f9bd965b28b8bf60579933f658c9c44802114"} Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.480631 4879 scope.go:117] "RemoveContainer" containerID="0b9bd2c2272b98f2159dae8ca1c2bf74fdbdec97908ea08773e05724c87145ea" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.481406 4879 scope.go:117] "RemoveContainer" containerID="9c8ba8fc79c4ec42beec4874612f9bd965b28b8bf60579933f658c9c44802114" Nov 25 14:49:26 crc kubenswrapper[4879]: E1125 14:49:26.481775 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=keystone-operator-controller-manager-748dc6576f-5rjs9_openstack-operators(89a61837-ab76-494d-a98d-268fed9bbe35)\"" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" podUID="89a61837-ab76-494d-a98d-268fed9bbe35" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.487176 4879 generic.go:334] "Generic (PLEG): container finished" podID="5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24" containerID="d6be5beba992af8d235932b1fe4bdd42a1b7688f7ee2c5b82ae044ba7780f8d9" exitCode=1 Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.487425 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" event={"ID":"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24","Type":"ContainerDied","Data":"d6be5beba992af8d235932b1fe4bdd42a1b7688f7ee2c5b82ae044ba7780f8d9"} Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.487573 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.487589 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.488722 4879 scope.go:117] "RemoveContainer" containerID="d6be5beba992af8d235932b1fe4bdd42a1b7688f7ee2c5b82ae044ba7780f8d9" Nov 25 14:49:26 crc kubenswrapper[4879]: I1125 14:49:26.505921 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ce1cc68-0dec-4842-b08b-0b7749bcd083" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.135973 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.137394 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.497251 4879 generic.go:334] "Generic (PLEG): container finished" podID="94c3a712-5baa-4789-ad81-8d4c0554d84b" containerID="c61d69d1b9bb698e8ca414526867d5ba8969c43de2b2211136f35a4d65962982" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.497254 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerDied","Data":"c61d69d1b9bb698e8ca414526867d5ba8969c43de2b2211136f35a4d65962982"} Nov 25 14:49:27 crc 
kubenswrapper[4879]: I1125 14:49:27.499231 4879 scope.go:117] "RemoveContainer" containerID="c61d69d1b9bb698e8ca414526867d5ba8969c43de2b2211136f35a4d65962982" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.503537 4879 generic.go:334] "Generic (PLEG): container finished" podID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" containerID="b68868d992497f15d66b4392e4402c457509eb6aa27993ea13fa11e819ba99fc" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.503603 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerDied","Data":"b68868d992497f15d66b4392e4402c457509eb6aa27993ea13fa11e819ba99fc"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.503968 4879 scope.go:117] "RemoveContainer" containerID="b68868d992497f15d66b4392e4402c457509eb6aa27993ea13fa11e819ba99fc" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.506067 4879 generic.go:334] "Generic (PLEG): container finished" podID="cefe5024-a03a-427e-84a5-a4f6eac64f12" containerID="03209e6d6dda01372cc565118ed0a6d94ff308b582a0a97b12f19fb9f7c90944" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.506172 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerDied","Data":"03209e6d6dda01372cc565118ed0a6d94ff308b582a0a97b12f19fb9f7c90944"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.506529 4879 scope.go:117] "RemoveContainer" containerID="03209e6d6dda01372cc565118ed0a6d94ff308b582a0a97b12f19fb9f7c90944" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.508710 4879 generic.go:334] "Generic (PLEG): container finished" podID="afbf9f55-3316-40bb-b53b-d4d96482f9d5" containerID="c4ee00e63a67ef390923f7545c3c2252a9280c145298e88892162d045a5adf0e" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.508775 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerDied","Data":"c4ee00e63a67ef390923f7545c3c2252a9280c145298e88892162d045a5adf0e"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.509188 4879 scope.go:117] "RemoveContainer" containerID="c4ee00e63a67ef390923f7545c3c2252a9280c145298e88892162d045a5adf0e" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.510984 4879 generic.go:334] "Generic (PLEG): container finished" podID="b7d6b37e-0aff-4496-b240-7770d1d23827" containerID="6fe1066e6eaee9b2ac399fafd519cbabb8e8df7b52f5b54a3a0d61f803e71861" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.511047 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" event={"ID":"b7d6b37e-0aff-4496-b240-7770d1d23827","Type":"ContainerDied","Data":"6fe1066e6eaee9b2ac399fafd519cbabb8e8df7b52f5b54a3a0d61f803e71861"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.511637 4879 scope.go:117] "RemoveContainer" containerID="6fe1066e6eaee9b2ac399fafd519cbabb8e8df7b52f5b54a3a0d61f803e71861" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.515184 4879 generic.go:334] "Generic (PLEG): container finished" podID="5c199b9d-786f-4520-a7bb-67f616b16b88" containerID="5da5f3449d43923103b1d58e899281bdafddc60c7407c382787dbdba33070b28" exitCode=1 Nov 25 14:49:27 
crc kubenswrapper[4879]: I1125 14:49:27.515243 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerDied","Data":"5da5f3449d43923103b1d58e899281bdafddc60c7407c382787dbdba33070b28"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.516334 4879 scope.go:117] "RemoveContainer" containerID="5da5f3449d43923103b1d58e899281bdafddc60c7407c382787dbdba33070b28" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.522639 4879 generic.go:334] "Generic (PLEG): container finished" podID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" containerID="2dc0817f3db5d45a26bafb93b0a9a549a0337190229d0aeb6a8ace1dc12bc769" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.522710 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerDied","Data":"2dc0817f3db5d45a26bafb93b0a9a549a0337190229d0aeb6a8ace1dc12bc769"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.523134 4879 scope.go:117] "RemoveContainer" containerID="2dc0817f3db5d45a26bafb93b0a9a549a0337190229d0aeb6a8ace1dc12bc769" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.525014 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" containerID="59dfc0b1de9583c9b896942444207c4ab9e0e1b47ec26aa98f1ff61547fdfe37" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.525064 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerDied","Data":"59dfc0b1de9583c9b896942444207c4ab9e0e1b47ec26aa98f1ff61547fdfe37"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.525435 4879 scope.go:117] "RemoveContainer" containerID="59dfc0b1de9583c9b896942444207c4ab9e0e1b47ec26aa98f1ff61547fdfe37" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.527855 4879 generic.go:334] "Generic (PLEG): container finished" podID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" containerID="6f796f780c99704b8944b1370e22ce207ec3fb2fd5dc0eeefc613feaea57e477" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.527891 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerDied","Data":"6f796f780c99704b8944b1370e22ce207ec3fb2fd5dc0eeefc613feaea57e477"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.528524 4879 scope.go:117] "RemoveContainer" containerID="6f796f780c99704b8944b1370e22ce207ec3fb2fd5dc0eeefc613feaea57e477" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.529904 4879 generic.go:334] "Generic (PLEG): container finished" podID="9d5bb254-3519-4805-bbfa-c4fad026bec1" containerID="5cb04c89c2f97f2f7504da2289fed6cedb609fa08a9639505d9674234da492c0" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.529926 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" event={"ID":"9d5bb254-3519-4805-bbfa-c4fad026bec1","Type":"ContainerDied","Data":"5cb04c89c2f97f2f7504da2289fed6cedb609fa08a9639505d9674234da492c0"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.530261 4879 scope.go:117] "RemoveContainer" 
containerID="5cb04c89c2f97f2f7504da2289fed6cedb609fa08a9639505d9674234da492c0" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.532664 4879 generic.go:334] "Generic (PLEG): container finished" podID="2e38fffb-d3ee-488d-bbc7-811d4ba43797" containerID="93267b192680033f46d35a8557b35b40337297941abdd3c618311a1e87e38090" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.532748 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" event={"ID":"2e38fffb-d3ee-488d-bbc7-811d4ba43797","Type":"ContainerDied","Data":"93267b192680033f46d35a8557b35b40337297941abdd3c618311a1e87e38090"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.533801 4879 scope.go:117] "RemoveContainer" containerID="93267b192680033f46d35a8557b35b40337297941abdd3c618311a1e87e38090" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.538990 4879 generic.go:334] "Generic (PLEG): container finished" podID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" containerID="8990e3b8a000b763557590fc073a778b9f0a2151f2cacf3f7c1cee8295a55769" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.539429 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerDied","Data":"8990e3b8a000b763557590fc073a778b9f0a2151f2cacf3f7c1cee8295a55769"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.540937 4879 scope.go:117] "RemoveContainer" containerID="8990e3b8a000b763557590fc073a778b9f0a2151f2cacf3f7c1cee8295a55769" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.547704 4879 generic.go:334] "Generic (PLEG): container finished" podID="230849f3-daef-4f23-9839-8f0bd76d8e4a" containerID="e11b3cc00fbcbf30f8ef921eff3eb4c6b508be12f95360187f09457b5a39a60a" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.547853 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerDied","Data":"e11b3cc00fbcbf30f8ef921eff3eb4c6b508be12f95360187f09457b5a39a60a"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.548391 4879 scope.go:117] "RemoveContainer" containerID="e11b3cc00fbcbf30f8ef921eff3eb4c6b508be12f95360187f09457b5a39a60a" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.553800 4879 generic.go:334] "Generic (PLEG): container finished" podID="81a1e752-3477-4e08-b151-874b0e503a1b" containerID="55b6a97a438917b8e123bbd1a64cb3e372526f39a06fc954aa7ea5075755dedb" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.553875 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerDied","Data":"55b6a97a438917b8e123bbd1a64cb3e372526f39a06fc954aa7ea5075755dedb"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.554525 4879 scope.go:117] "RemoveContainer" containerID="55b6a97a438917b8e123bbd1a64cb3e372526f39a06fc954aa7ea5075755dedb" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.563659 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" 
event={"ID":"5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24","Type":"ContainerStarted","Data":"e7b35e8b68d93334077fb9fd1b883aef5e2d95d32a9f9513bf8784ee4e3bc79b"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.563838 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.568843 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" containerID="159d936594bd06ac88ccc353452a14c27042af95c9a9f3daeecc4055b01156e5" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.568917 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerDied","Data":"159d936594bd06ac88ccc353452a14c27042af95c9a9f3daeecc4055b01156e5"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.569436 4879 scope.go:117] "RemoveContainer" containerID="159d936594bd06ac88ccc353452a14c27042af95c9a9f3daeecc4055b01156e5" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.572113 4879 generic.go:334] "Generic (PLEG): container finished" podID="be0d238d-5b08-42e1-ac21-4e00592ab433" containerID="d7650819c2f2f67dcf84527f9e5a4b89c132fef6e147dcc2fe04b3414b4a8f8a" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.572195 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerDied","Data":"d7650819c2f2f67dcf84527f9e5a4b89c132fef6e147dcc2fe04b3414b4a8f8a"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.572579 4879 scope.go:117] "RemoveContainer" containerID="d7650819c2f2f67dcf84527f9e5a4b89c132fef6e147dcc2fe04b3414b4a8f8a" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.575607 4879 generic.go:334] "Generic (PLEG): container finished" podID="279425db-228b-4697-864f-e50d2eb66012" containerID="5916db203b7bf14bbabf8bbda5a6a4f6b13a70d932f3391ed81231243ddf68bd" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.575707 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerDied","Data":"5916db203b7bf14bbabf8bbda5a6a4f6b13a70d932f3391ed81231243ddf68bd"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.576199 4879 scope.go:117] "RemoveContainer" containerID="5916db203b7bf14bbabf8bbda5a6a4f6b13a70d932f3391ed81231243ddf68bd" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.583681 4879 generic.go:334] "Generic (PLEG): container finished" podID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" containerID="a2f04fdcd428e81c5266cd6addfd05d0e2f215ed59ee47fdb5c11415427aa9e3" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.583748 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerDied","Data":"a2f04fdcd428e81c5266cd6addfd05d0e2f215ed59ee47fdb5c11415427aa9e3"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.584257 4879 scope.go:117] "RemoveContainer" containerID="a2f04fdcd428e81c5266cd6addfd05d0e2f215ed59ee47fdb5c11415427aa9e3" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.587414 4879 
generic.go:334] "Generic (PLEG): container finished" podID="c5c5776f-3970-425a-b5a7-c4c859f821e0" containerID="401f57fd2107333250f571a6331447e0a331329da55c67f02d5e2817483bf2a5" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.587458 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerDied","Data":"401f57fd2107333250f571a6331447e0a331329da55c67f02d5e2817483bf2a5"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.588367 4879 scope.go:117] "RemoveContainer" containerID="401f57fd2107333250f571a6331447e0a331329da55c67f02d5e2817483bf2a5" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.594068 4879 generic.go:334] "Generic (PLEG): container finished" podID="461b714a-4ee7-40ab-99d3-cd78552b52c6" containerID="61cb4c2adb1ddf2c09a4c875d136aee7431d83f27a1baee40e1375d5e47f658b" exitCode=1 Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.594329 4879 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.594346 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="a357275d-a98b-4ba6-8649-6367ecb5401f" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.594519 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerDied","Data":"61cb4c2adb1ddf2c09a4c875d136aee7431d83f27a1baee40e1375d5e47f658b"} Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.595035 4879 scope.go:117] "RemoveContainer" containerID="61cb4c2adb1ddf2c09a4c875d136aee7431d83f27a1baee40e1375d5e47f658b" Nov 25 14:49:27 crc kubenswrapper[4879]: I1125 14:49:27.872959 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8ce1cc68-0dec-4842-b08b-0b7749bcd083" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.605273 4879 generic.go:334] "Generic (PLEG): container finished" podID="b7d6b37e-0aff-4496-b240-7770d1d23827" containerID="33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.605339 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" event={"ID":"b7d6b37e-0aff-4496-b240-7770d1d23827","Type":"ContainerDied","Data":"33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.605610 4879 scope.go:117] "RemoveContainer" containerID="6fe1066e6eaee9b2ac399fafd519cbabb8e8df7b52f5b54a3a0d61f803e71861" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.606175 4879 scope.go:117] "RemoveContainer" containerID="33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.606407 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=openstack-operator-controller-manager-77bf44fb75-xdhlf_openstack-operators(b7d6b37e-0aff-4496-b240-7770d1d23827)\"" 
pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" podUID="b7d6b37e-0aff-4496-b240-7770d1d23827" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.611966 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerStarted","Data":"5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.612244 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.625238 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerStarted","Data":"6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.631762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerStarted","Data":"a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.632724 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.638062 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerStarted","Data":"f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.638631 4879 scope.go:117] "RemoveContainer" containerID="f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.638838 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=swift-operator-controller-manager-6fdc4fcf86-pf28x_openstack-operators(81a1e752-3477-4e08-b151-874b0e503a1b)\"" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" podUID="81a1e752-3477-4e08-b151-874b0e503a1b" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.642326 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" event={"ID":"9d5bb254-3519-4805-bbfa-c4fad026bec1","Type":"ContainerStarted","Data":"8a82cb19997252e95111c10138fd411a4b86e47c41ff5ff5c41c0e9d58870051"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.642708 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.666798 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerStarted","Data":"24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 
14:49:28.667975 4879 scope.go:117] "RemoveContainer" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.668728 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012)\"" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.669821 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" containerID="c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.669905 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerDied","Data":"c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.670687 4879 scope.go:117] "RemoveContainer" containerID="c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.671024 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=neutron-operator-controller-manager-7c57c8bbc4-7mc5t_openstack-operators(8f91f389-91ad-4a56-9e71-5cf7bb88db01)\"" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" podUID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.672276 4879 generic.go:334] "Generic (PLEG): container finished" podID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" containerID="ba1b3b2889ec1e448d90660786baf6e8608e9efbcd410d30d7e21de7b21aa5d5" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.672349 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerDied","Data":"ba1b3b2889ec1e448d90660786baf6e8608e9efbcd410d30d7e21de7b21aa5d5"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.675154 4879 scope.go:117] "RemoveContainer" containerID="ba1b3b2889ec1e448d90660786baf6e8608e9efbcd410d30d7e21de7b21aa5d5" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.675466 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=operator pod=rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676)\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.675812 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerStarted","Data":"e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 
14:49:28.676840 4879 scope.go:117] "RemoveContainer" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.677941 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.679899 4879 generic.go:334] "Generic (PLEG): container finished" podID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.679967 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerDied","Data":"6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.680520 4879 scope.go:117] "RemoveContainer" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.680768 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.684438 4879 generic.go:334] "Generic (PLEG): container finished" podID="cefe5024-a03a-427e-84a5-a4f6eac64f12" containerID="0803087feda622bfea22aa45093484b99813743a567b0d466a022566d1770951" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.684514 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerDied","Data":"0803087feda622bfea22aa45093484b99813743a567b0d466a022566d1770951"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.685095 4879 scope.go:117] "RemoveContainer" containerID="0803087feda622bfea22aa45093484b99813743a567b0d466a022566d1770951" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.685422 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=horizon-operator-controller-manager-68c9694994-ltnmm_openstack-operators(cefe5024-a03a-427e-84a5-a4f6eac64f12)\"" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" podUID="cefe5024-a03a-427e-84a5-a4f6eac64f12" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.690931 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerStarted","Data":"9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 
14:49:28.691866 4879 scope.go:117] "RemoveContainer" containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.692151 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.705615 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerStarted","Data":"2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.706374 4879 scope.go:117] "RemoveContainer" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.706730 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.714396 4879 generic.go:334] "Generic (PLEG): container finished" podID="5c199b9d-786f-4520-a7bb-67f616b16b88" containerID="cd206d49d5c18f3ffb366c319d925bac14896315238ab6e2f35b773c107973ab" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.714597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerDied","Data":"cd206d49d5c18f3ffb366c319d925bac14896315238ab6e2f35b773c107973ab"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.715109 4879 scope.go:117] "RemoveContainer" containerID="cd206d49d5c18f3ffb366c319d925bac14896315238ab6e2f35b773c107973ab" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.715387 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=octavia-operator-controller-manager-fd75fd47d-txbsq_openstack-operators(5c199b9d-786f-4520-a7bb-67f616b16b88)\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" podUID="5c199b9d-786f-4520-a7bb-67f616b16b88" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.724331 4879 scope.go:117] "RemoveContainer" containerID="59dfc0b1de9583c9b896942444207c4ab9e0e1b47ec26aa98f1ff61547fdfe37" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.725485 4879 generic.go:334] "Generic (PLEG): container finished" podID="94c3a712-5baa-4789-ad81-8d4c0554d84b" containerID="6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.725546 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" 
event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerDied","Data":"6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.726368 4879 scope.go:117] "RemoveContainer" containerID="6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.726589 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b)\"" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.731873 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" event={"ID":"2e38fffb-d3ee-488d-bbc7-811d4ba43797","Type":"ContainerStarted","Data":"e1ab50cc1e5bb202deb04a1ceab407f65ffe0cce2ee4790e314ce658003505cb"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.732333 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.741764 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerStarted","Data":"99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.742441 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.742738 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.750370 4879 generic.go:334] "Generic (PLEG): container finished" podID="afbf9f55-3316-40bb-b53b-d4d96482f9d5" containerID="6d431eef944df702884f7ed955fe4b97ef32e7592aaab672e205eb555dd98d3c" exitCode=1 Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.750464 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerDied","Data":"6d431eef944df702884f7ed955fe4b97ef32e7592aaab672e205eb555dd98d3c"} Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.751232 4879 scope.go:117] "RemoveContainer" containerID="6d431eef944df702884f7ed955fe4b97ef32e7592aaab672e205eb555dd98d3c" Nov 25 14:49:28 crc kubenswrapper[4879]: E1125 14:49:28.751508 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=infra-operator-controller-manager-d5cc86f4b-8tdb9_openstack-operators(afbf9f55-3316-40bb-b53b-d4d96482f9d5)\"" 
pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" podUID="afbf9f55-3316-40bb-b53b-d4d96482f9d5" Nov 25 14:49:28 crc kubenswrapper[4879]: I1125 14:49:28.828254 4879 scope.go:117] "RemoveContainer" containerID="b68868d992497f15d66b4392e4402c457509eb6aa27993ea13fa11e819ba99fc" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.023935 4879 scope.go:117] "RemoveContainer" containerID="6f796f780c99704b8944b1370e22ce207ec3fb2fd5dc0eeefc613feaea57e477" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.073930 4879 scope.go:117] "RemoveContainer" containerID="03209e6d6dda01372cc565118ed0a6d94ff308b582a0a97b12f19fb9f7c90944" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.215679 4879 scope.go:117] "RemoveContainer" containerID="5da5f3449d43923103b1d58e899281bdafddc60c7407c382787dbdba33070b28" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.256585 4879 scope.go:117] "RemoveContainer" containerID="c61d69d1b9bb698e8ca414526867d5ba8969c43de2b2211136f35a4d65962982" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.394578 4879 scope.go:117] "RemoveContainer" containerID="c4ee00e63a67ef390923f7545c3c2252a9280c145298e88892162d045a5adf0e" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.765968 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.766177 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerDied","Data":"6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.766398 4879 scope.go:117] "RemoveContainer" containerID="159d936594bd06ac88ccc353452a14c27042af95c9a9f3daeecc4055b01156e5" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.766888 4879 scope.go:117] "RemoveContainer" containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.767213 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.778493 4879 generic.go:334] "Generic (PLEG): container finished" podID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.778573 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerDied","Data":"5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.780037 4879 scope.go:117] "RemoveContainer" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.780846 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.781216 4879 generic.go:334] "Generic (PLEG): container finished" podID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" containerID="aaca102ee4b0c5bf46a07be9bf9983ea4005ed21f9daeeb9be7ed60867ba72f1" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.781303 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerDied","Data":"aaca102ee4b0c5bf46a07be9bf9983ea4005ed21f9daeeb9be7ed60867ba72f1"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.781688 4879 scope.go:117] "RemoveContainer" containerID="aaca102ee4b0c5bf46a07be9bf9983ea4005ed21f9daeeb9be7ed60867ba72f1" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.792475 4879 generic.go:334] "Generic (PLEG): container finished" podID="81a1e752-3477-4e08-b151-874b0e503a1b" containerID="f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.792542 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerDied","Data":"f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.793103 4879 scope.go:117] "RemoveContainer" containerID="f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.793336 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=swift-operator-controller-manager-6fdc4fcf86-pf28x_openstack-operators(81a1e752-3477-4e08-b151-874b0e503a1b)\"" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" podUID="81a1e752-3477-4e08-b151-874b0e503a1b" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.801544 4879 generic.go:334] "Generic (PLEG): container finished" podID="279425db-228b-4697-864f-e50d2eb66012" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.801619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerDied","Data":"24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.802403 4879 scope.go:117] "RemoveContainer" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.802655 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012)\"" 
pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.825176 4879 generic.go:334] "Generic (PLEG): container finished" podID="be0d238d-5b08-42e1-ac21-4e00592ab433" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.825236 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerDied","Data":"2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.826479 4879 scope.go:117] "RemoveContainer" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.826985 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.834321 4879 scope.go:117] "RemoveContainer" containerID="8990e3b8a000b763557590fc073a778b9f0a2151f2cacf3f7c1cee8295a55769" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.834419 4879 generic.go:334] "Generic (PLEG): container finished" podID="461b714a-4ee7-40ab-99d3-cd78552b52c6" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.834460 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerDied","Data":"e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.835026 4879 scope.go:117] "RemoveContainer" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.835280 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.840246 4879 generic.go:334] "Generic (PLEG): container finished" podID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.840311 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerDied","Data":"9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.841012 4879 scope.go:117] "RemoveContainer" 
containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.841380 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.848974 4879 generic.go:334] "Generic (PLEG): container finished" podID="06ee2ae7-d534-4170-9862-53a2580c39ce" containerID="b71ac1a211654bdd6256dd61660e120d3c92ee883e0a27264e763738b1919885" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.849038 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerDied","Data":"b71ac1a211654bdd6256dd61660e120d3c92ee883e0a27264e763738b1919885"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.849808 4879 scope.go:117] "RemoveContainer" containerID="b71ac1a211654bdd6256dd61660e120d3c92ee883e0a27264e763738b1919885" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.855835 4879 generic.go:334] "Generic (PLEG): container finished" podID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" containerID="9a0020e247361f5ecb1fd10f69c56758427e9d274e94efb04979e9c31709228b" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.855914 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerDied","Data":"9a0020e247361f5ecb1fd10f69c56758427e9d274e94efb04979e9c31709228b"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.856355 4879 scope.go:117] "RemoveContainer" containerID="9a0020e247361f5ecb1fd10f69c56758427e9d274e94efb04979e9c31709228b" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.856628 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6)\"" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.860962 4879 generic.go:334] "Generic (PLEG): container finished" podID="c5c5776f-3970-425a-b5a7-c4c859f821e0" containerID="a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.861041 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerDied","Data":"a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.861520 4879 scope.go:117] "RemoveContainer" containerID="a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.861721 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=nova-operator-controller-manager-79556f57fc-mq9fj_openstack-operators(c5c5776f-3970-425a-b5a7-c4c859f821e0)\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" podUID="c5c5776f-3970-425a-b5a7-c4c859f821e0" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.863891 4879 generic.go:334] "Generic (PLEG): container finished" podID="230849f3-daef-4f23-9839-8f0bd76d8e4a" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" exitCode=1 Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.863936 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerDied","Data":"99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f"} Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.864297 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:29 crc kubenswrapper[4879]: E1125 14:49:29.864504 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:29 crc kubenswrapper[4879]: I1125 14:49:29.974923 4879 scope.go:117] "RemoveContainer" containerID="55b6a97a438917b8e123bbd1a64cb3e372526f39a06fc954aa7ea5075755dedb" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.043286 4879 scope.go:117] "RemoveContainer" containerID="5916db203b7bf14bbabf8bbda5a6a4f6b13a70d932f3391ed81231243ddf68bd" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.141162 4879 scope.go:117] "RemoveContainer" containerID="d7650819c2f2f67dcf84527f9e5a4b89c132fef6e147dcc2fe04b3414b4a8f8a" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.320084 4879 scope.go:117] "RemoveContainer" containerID="61cb4c2adb1ddf2c09a4c875d136aee7431d83f27a1baee40e1375d5e47f658b" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.422204 4879 scope.go:117] "RemoveContainer" containerID="2dc0817f3db5d45a26bafb93b0a9a549a0337190229d0aeb6a8ace1dc12bc769" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.456440 4879 scope.go:117] "RemoveContainer" containerID="a2f04fdcd428e81c5266cd6addfd05d0e2f215ed59ee47fdb5c11415427aa9e3" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.542912 4879 scope.go:117] "RemoveContainer" containerID="401f57fd2107333250f571a6331447e0a331329da55c67f02d5e2817483bf2a5" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.599541 4879 scope.go:117] "RemoveContainer" containerID="e11b3cc00fbcbf30f8ef921eff3eb4c6b508be12f95360187f09457b5a39a60a" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.882999 4879 scope.go:117] "RemoveContainer" containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" Nov 25 14:49:30 crc kubenswrapper[4879]: E1125 14:49:30.883318 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager 
pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.888058 4879 generic.go:334] "Generic (PLEG): container finished" podID="06ee2ae7-d534-4170-9862-53a2580c39ce" containerID="b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07" exitCode=1 Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.888166 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerDied","Data":"b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07"} Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.888212 4879 scope.go:117] "RemoveContainer" containerID="b71ac1a211654bdd6256dd61660e120d3c92ee883e0a27264e763738b1919885" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.889069 4879 scope.go:117] "RemoveContainer" containerID="b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07" Nov 25 14:49:30 crc kubenswrapper[4879]: E1125 14:49:30.889477 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce)\"" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.893006 4879 scope.go:117] "RemoveContainer" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" Nov 25 14:49:30 crc kubenswrapper[4879]: E1125 14:49:30.893351 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.897431 4879 scope.go:117] "RemoveContainer" containerID="a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37" Nov 25 14:49:30 crc kubenswrapper[4879]: E1125 14:49:30.897743 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=nova-operator-controller-manager-79556f57fc-mq9fj_openstack-operators(c5c5776f-3970-425a-b5a7-c4c859f821e0)\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" podUID="c5c5776f-3970-425a-b5a7-c4c859f821e0" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.904583 4879 generic.go:334] "Generic (PLEG): container finished" podID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" containerID="fbbaa211666e6df36d932f06ee802e24a000cabc386e214f346a3e772322281a" exitCode=1 Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.904690 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" 
event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerDied","Data":"fbbaa211666e6df36d932f06ee802e24a000cabc386e214f346a3e772322281a"} Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.906761 4879 scope.go:117] "RemoveContainer" containerID="fbbaa211666e6df36d932f06ee802e24a000cabc386e214f346a3e772322281a" Nov 25 14:49:30 crc kubenswrapper[4879]: E1125 14:49:30.907501 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-567f98c9d-zvb94_openstack-operators(2037b3b9-3099-4f88-8e56-ec28ee25efa5)\"" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.920286 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.920587 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/kube-state-metrics-0" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.921819 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-state-metrics" containerStatusID={"Type":"cri-o","ID":"ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11"} pod="openstack/kube-state-metrics-0" containerMessage="Container kube-state-metrics failed liveness probe, will be restarted" Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.921854 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" containerID="cri-o://ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11" gracePeriod=30 Nov 25 14:49:30 crc kubenswrapper[4879]: I1125 14:49:30.947231 4879 scope.go:117] "RemoveContainer" containerID="aaca102ee4b0c5bf46a07be9bf9983ea4005ed21f9daeeb9be7ed60867ba72f1" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.696340 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.696891 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.697623 4879 scope.go:117] "RemoveContainer" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.698000 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.719311 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" 
Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.719361 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.719722 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.719958 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.751318 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.751364 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.752001 4879 scope.go:117] "RemoveContainer" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.752329 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.772962 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.773073 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.788211 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.788278 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.788995 4879 scope.go:117] "RemoveContainer" containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.789288 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.799687 
4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.799747 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.800591 4879 scope.go:117] "RemoveContainer" containerID="0803087feda622bfea22aa45093484b99813743a567b0d466a022566d1770951" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.800884 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=horizon-operator-controller-manager-68c9694994-ltnmm_openstack-operators(cefe5024-a03a-427e-84a5-a4f6eac64f12)\"" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" podUID="cefe5024-a03a-427e-84a5-a4f6eac64f12" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.837189 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.837239 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.837905 4879 scope.go:117] "RemoveContainer" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.838171 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.937429 4879 generic.go:334] "Generic (PLEG): container finished" podID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerID="ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11" exitCode=2 Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.938069 4879 scope.go:117] "RemoveContainer" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.938351 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.938750 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerDied","Data":"ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11"} Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.939171 4879 scope.go:117] "RemoveContainer" containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" Nov 25 14:49:31 crc 
kubenswrapper[4879]: E1125 14:49:31.939393 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.939725 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.939955 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.993691 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:49:31 crc kubenswrapper[4879]: I1125 14:49:31.995116 4879 scope.go:117] "RemoveContainer" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" Nov 25 14:49:31 crc kubenswrapper[4879]: E1125 14:49:31.995360 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.013932 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.013978 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.014682 4879 scope.go:117] "RemoveContainer" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.014958 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012)\"" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.130676 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.130731 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:49:32 crc 
kubenswrapper[4879]: I1125 14:49:32.131404 4879 scope.go:117] "RemoveContainer" containerID="c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.131637 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=neutron-operator-controller-manager-7c57c8bbc4-7mc5t_openstack-operators(8f91f389-91ad-4a56-9e71-5cf7bb88db01)\"" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" podUID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.142920 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.143598 4879 scope.go:117] "RemoveContainer" containerID="a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.143821 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=nova-operator-controller-manager-79556f57fc-mq9fj_openstack-operators(c5c5776f-3970-425a-b5a7-c4c859f821e0)\"" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" podUID="c5c5776f-3970-425a-b5a7-c4c859f821e0" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.147602 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.151680 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.151712 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.152144 4879 scope.go:117] "RemoveContainer" containerID="cd206d49d5c18f3ffb366c319d925bac14896315238ab6e2f35b773c107973ab" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.152329 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=octavia-operator-controller-manager-fd75fd47d-txbsq_openstack-operators(5c199b9d-786f-4520-a7bb-67f616b16b88)\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" podUID="5c199b9d-786f-4520-a7bb-67f616b16b88" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.262684 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.263433 4879 scope.go:117] "RemoveContainer" containerID="6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.263660 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager 
pod=ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b)\"" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.263836 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.283350 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.283419 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.284327 4879 scope.go:117] "RemoveContainer" containerID="9c8ba8fc79c4ec42beec4874612f9bd965b28b8bf60579933f658c9c44802114" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.284716 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=keystone-operator-controller-manager-748dc6576f-5rjs9_openstack-operators(89a61837-ab76-494d-a98d-268fed9bbe35)\"" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" podUID="89a61837-ab76-494d-a98d-268fed9bbe35" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.346561 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.346606 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.347315 4879 scope.go:117] "RemoveContainer" containerID="9a0020e247361f5ecb1fd10f69c56758427e9d274e94efb04979e9c31709228b" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.347586 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=placement-operator-controller-manager-5db546f9d9-nk7tw_openstack-operators(4ca6d024-306e-4707-abb0-1b57ed1e11b6)\"" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" podUID="4ca6d024-306e-4707-abb0-1b57ed1e11b6" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.371921 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.371969 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.372739 4879 scope.go:117] "RemoveContainer" containerID="f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.373062 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager 
pod=swift-operator-controller-manager-6fdc4fcf86-pf28x_openstack-operators(81a1e752-3477-4e08-b151-874b0e503a1b)\"" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" podUID="81a1e752-3477-4e08-b151-874b0e503a1b" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.384884 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.384951 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.385618 4879 scope.go:117] "RemoveContainer" containerID="fbbaa211666e6df36d932f06ee802e24a000cabc386e214f346a3e772322281a" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.385860 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=telemetry-operator-controller-manager-567f98c9d-zvb94_openstack-operators(2037b3b9-3099-4f88-8e56-ec28ee25efa5)\"" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" podUID="2037b3b9-3099-4f88-8e56-ec28ee25efa5" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.481360 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.481426 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.482288 4879 scope.go:117] "RemoveContainer" containerID="6d431eef944df702884f7ed955fe4b97ef32e7592aaab672e205eb555dd98d3c" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.482544 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=infra-operator-controller-manager-d5cc86f4b-8tdb9_openstack-operators(afbf9f55-3316-40bb-b53b-d4d96482f9d5)\"" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" podUID="afbf9f55-3316-40bb-b53b-d4d96482f9d5" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.585023 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.585082 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.585810 4879 scope.go:117] "RemoveContainer" containerID="b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.586105 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce)\"" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:49:32 crc kubenswrapper[4879]: 
I1125 14:49:32.947086 4879 generic.go:334] "Generic (PLEG): container finished" podID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerID="3ad409754d3a5b251fbd86ec78dcf36d15b8f46ac576be0733ebc84bb6de0b47" exitCode=1 Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.947157 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerDied","Data":"3ad409754d3a5b251fbd86ec78dcf36d15b8f46ac576be0733ebc84bb6de0b47"} Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.947487 4879 scope.go:117] "RemoveContainer" containerID="ea3c704c9aa1285b9bc543c7cfe874f66462b4c6971e383387f4e246d4e75d11" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.948055 4879 scope.go:117] "RemoveContainer" containerID="3ad409754d3a5b251fbd86ec78dcf36d15b8f46ac576be0733ebc84bb6de0b47" Nov 25 14:49:32 crc kubenswrapper[4879]: I1125 14:49:32.948162 4879 scope.go:117] "RemoveContainer" containerID="6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0" Nov 25 14:49:32 crc kubenswrapper[4879]: E1125 14:49:32.948396 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=ovn-operator-controller-manager-66cf5c67ff-pr6k5_openstack-operators(94c3a712-5baa-4789-ad81-8d4c0554d84b)\"" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" podUID="94c3a712-5baa-4789-ad81-8d4c0554d84b" Nov 25 14:49:33 crc kubenswrapper[4879]: I1125 14:49:33.790288 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf" Nov 25 14:49:34 crc kubenswrapper[4879]: I1125 14:49:34.401263 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-nz7qk" Nov 25 14:49:34 crc kubenswrapper[4879]: I1125 14:49:34.579156 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 25 14:49:34 crc kubenswrapper[4879]: I1125 14:49:34.644924 4879 scope.go:117] "RemoveContainer" containerID="d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81" Nov 25 14:49:34 crc kubenswrapper[4879]: I1125 14:49:34.768571 4879 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 25 14:49:34 crc kubenswrapper[4879]: I1125 14:49:34.967934 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerStarted","Data":"531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15"} Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.383880 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.446011 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.450487 4879 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.632174 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 
25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.675204 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.940542 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.979471 4879 generic.go:334] "Generic (PLEG): container finished" podID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" exitCode=1 Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.979593 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerDied","Data":"2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07"} Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.979720 4879 scope.go:117] "RemoveContainer" containerID="3ad409754d3a5b251fbd86ec78dcf36d15b8f46ac576be0733ebc84bb6de0b47" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.980143 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:49:35 crc kubenswrapper[4879]: E1125 14:49:35.980454 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-state-metrics pod=kube-state-metrics-0_openstack(821c14ef-2ea4-488d-84f9-2234a6e27447)\"" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.983111 4879 generic.go:334] "Generic (PLEG): container finished" podID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15" exitCode=1 Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.983255 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerDied","Data":"531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15"} Nov 25 14:49:35 crc kubenswrapper[4879]: I1125 14:49:35.983806 4879 scope.go:117] "RemoveContainer" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15" Nov 25 14:49:35 crc kubenswrapper[4879]: E1125 14:49:35.984677 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.046112 4879 scope.go:117] "RemoveContainer" containerID="d6b30ee38c73ed38855be5f4bd952c64e53c799ec4b538b48bfbb66787e7ac81" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.138916 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.248827 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.253757 4879 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.254736 4879 scope.go:117] "RemoveContainer" containerID="33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73" Nov 25 14:49:36 crc kubenswrapper[4879]: E1125 14:49:36.255006 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=openstack-operator-controller-manager-77bf44fb75-xdhlf_openstack-operators(b7d6b37e-0aff-4496-b240-7770d1d23827)\"" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" podUID="b7d6b37e-0aff-4496-b240-7770d1d23827" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.255847 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.338636 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.386149 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.393474 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.494221 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.559660 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.643514 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.644015 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.789219 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.925852 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-6d4l7" Nov 25 14:49:36 crc kubenswrapper[4879]: I1125 14:49:36.993328 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:49:36 crc kubenswrapper[4879]: E1125 14:49:36.993640 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-state-metrics pod=kube-state-metrics-0_openstack(821c14ef-2ea4-488d-84f9-2234a6e27447)\"" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.000861 4879 scope.go:117] "RemoveContainer" containerID="33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73" Nov 25 14:49:37 
crc kubenswrapper[4879]: I1125 14:49:37.000913 4879 scope.go:117] "RemoveContainer" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15" Nov 25 14:49:37 crc kubenswrapper[4879]: E1125 14:49:37.001173 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=openstack-operator-controller-manager-77bf44fb75-xdhlf_openstack-operators(b7d6b37e-0aff-4496-b240-7770d1d23827)\"" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" podUID="b7d6b37e-0aff-4496-b240-7770d1d23827" Nov 25 14:49:37 crc kubenswrapper[4879]: E1125 14:49:37.001194 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.011624 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.012713 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.045834 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.112974 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.141477 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.143180 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5fd4b8b4b5-64rqt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.371407 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.375177 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.584931 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.635532 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-msmxj" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.734678 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.775495 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.808707 4879 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"kube-root-ca.crt" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.810437 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-926tr" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.938086 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 25 14:49:37 crc kubenswrapper[4879]: I1125 14:49:37.950192 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.058505 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.066795 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-48gsd" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.111616 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.187823 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.231078 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.254968 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.295313 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-rdk4k" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.296019 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.408329 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.419352 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.467107 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-qnmfv" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.515009 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.528406 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.577201 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.695188 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.709439 4879 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.807679 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mp6bw" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.863983 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-dfrxk" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.875354 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.887575 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.898871 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.927501 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 25 14:49:38 crc kubenswrapper[4879]: I1125 14:49:38.970083 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.166903 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.167015 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-ptrrw" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.177561 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.227871 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.246316 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.249408 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.271415 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.277959 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.292089 4879 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.296906 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=36.296886514 podStartE2EDuration="36.296886514s" podCreationTimestamp="2025-11-25 14:49:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:24.829735212 +0000 UTC 
m=+1456.433148293" watchObservedRunningTime="2025-11-25 14:49:39.296886514 +0000 UTC m=+1470.900299595" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.299146 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=28.297661638 podStartE2EDuration="39.299135707s" podCreationTimestamp="2025-11-25 14:49:00 +0000 UTC" firstStartedPulling="2025-11-25 14:49:01.393254563 +0000 UTC m=+1432.996667634" lastFinishedPulling="2025-11-25 14:49:12.394728632 +0000 UTC m=+1443.998141703" observedRunningTime="2025-11-25 14:49:24.956854724 +0000 UTC m=+1456.560267805" watchObservedRunningTime="2025-11-25 14:49:39.299135707 +0000 UTC m=+1470.902548778" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.300326 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=38.300320148 podStartE2EDuration="38.300320148s" podCreationTimestamp="2025-11-25 14:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:24.863812191 +0000 UTC m=+1456.467225262" watchObservedRunningTime="2025-11-25 14:49:39.300320148 +0000 UTC m=+1470.903733229" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.305283 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0","openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.305604 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.310546 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.326973 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=15.326951982 podStartE2EDuration="15.326951982s" podCreationTimestamp="2025-11-25 14:49:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:39.323268201 +0000 UTC m=+1470.926681282" watchObservedRunningTime="2025-11-25 14:49:39.326951982 +0000 UTC m=+1470.930365053" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.358621 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.422718 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.534339 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5ks9l" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.564459 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.568784 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mxvzg" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.626013 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 25 14:49:39 crc 
kubenswrapper[4879]: I1125 14:49:39.657074 4879 scope.go:117] "RemoveContainer" containerID="ba1b3b2889ec1e448d90660786baf6e8608e9efbcd410d30d7e21de7b21aa5d5" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.665525 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a" path="/var/lib/kubelet/pods/2a0a0c9b-7a3c-45f3-b8c8-5bc5e8cd211a/volumes" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.707027 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.717412 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.741112 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.801538 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.805711 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.837593 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.842767 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-5x88k" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.870548 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.927069 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.930516 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.949725 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 25 14:49:39 crc kubenswrapper[4879]: I1125 14:49:39.973459 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.022555 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.025737 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerStarted","Data":"f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074"} Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.103550 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.118736 4879 reflector.go:368] Caches populated for *v1.Secret from 
object-"cert-manager"/"cert-manager-dockercfg-wkh6w" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.125887 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.144239 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.159142 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.161594 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.256371 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.275892 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-vxt5s" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.293931 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.344036 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.353250 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.372793 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.420089 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.448436 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.457892 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.478439 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.516618 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.556094 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.667394 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.689849 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.718227 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 
25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.718232 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.777804 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.781618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.813948 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.842014 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.909350 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/kube-state-metrics-0" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.909651 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.910181 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:49:40 crc kubenswrapper[4879]: E1125 14:49:40.910491 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-state-metrics pod=kube-state-metrics-0_openstack(821c14ef-2ea4-488d-84f9-2234a6e27447)\"" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.946027 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.946568 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-j45bs" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.948878 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-mzjv2" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.952554 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.979451 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7zcln" Nov 25 14:49:40 crc kubenswrapper[4879]: I1125 14:49:40.997926 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.011555 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.011863 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.014039 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.022241 4879 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.034109 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.035650 4879 generic.go:334] "Generic (PLEG): container finished" podID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" containerID="f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074" exitCode=1 Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.035742 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerDied","Data":"f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074"} Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.035800 4879 scope.go:117] "RemoveContainer" containerID="ba1b3b2889ec1e448d90660786baf6e8608e9efbcd410d30d7e21de7b21aa5d5" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.036331 4879 scope.go:117] "RemoveContainer" containerID="f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074" Nov 25 14:49:41 crc kubenswrapper[4879]: E1125 14:49:41.036596 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=operator pod=rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676)\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.037955 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:49:41 crc kubenswrapper[4879]: E1125 14:49:41.038277 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-state-metrics pod=kube-state-metrics-0_openstack(821c14ef-2ea4-488d-84f9-2234a6e27447)\"" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.101451 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.129759 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.136017 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.140148 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-c2rkz" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.199516 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.282871 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.306423 4879 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.323821 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.332360 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.346299 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.425204 4879 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.505484 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.526832 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.559509 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.578460 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.591543 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-qc22t" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.612359 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.622949 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.623156 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-hmphj" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.629359 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.635060 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.706376 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.728860 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.742510 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.774979 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.775275 4879 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="dnsmasq-dns" containerID="cri-o://1feca86685f4db2898b53b2d74a33b811cc2699d97b2d5d9c9274b9e6bd05285" gracePeriod=10 Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.810792 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.821546 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.822757 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.833034 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.881520 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.922850 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.950513 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.953049 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.976630 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 25 14:49:41 crc kubenswrapper[4879]: I1125 14:49:41.998956 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.016329 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.028441 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.035679 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.059378 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.074868 4879 scope.go:117] "RemoveContainer" containerID="f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074" Nov 25 14:49:42 crc kubenswrapper[4879]: E1125 14:49:42.075143 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=operator pod=rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676)\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:49:42 crc kubenswrapper[4879]: 
I1125 14:49:42.080278 4879 generic.go:334] "Generic (PLEG): container finished" podID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerID="1feca86685f4db2898b53b2d74a33b811cc2699d97b2d5d9c9274b9e6bd05285" exitCode=0 Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.080394 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" event={"ID":"676c4fde-fa81-48be-82cf-c8aa4baf4d71","Type":"ContainerDied","Data":"1feca86685f4db2898b53b2d74a33b811cc2699d97b2d5d9c9274b9e6bd05285"} Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.095942 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.105453 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-tgpxw" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.108269 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.201755 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.254728 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.273006 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.301214 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.331185 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.331851 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.333801 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.346793 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.350354 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.413769 4879 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420632 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420689 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420741 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq6lf\" (UniqueName: \"kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420820 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420875 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.420917 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc\") pod \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\" (UID: \"676c4fde-fa81-48be-82cf-c8aa4baf4d71\") " Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.424945 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.428796 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.440258 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf" (OuterVolumeSpecName: "kube-api-access-wq6lf") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: 
"676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "kube-api-access-wq6lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.459003 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.474796 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-746hf" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.480743 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: "676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.486925 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-zxms2" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.491481 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config" (OuterVolumeSpecName: "config") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: "676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.491605 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: "676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.491691 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: "676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.503707 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "676c4fde-fa81-48be-82cf-c8aa4baf4d71" (UID: "676c4fde-fa81-48be-82cf-c8aa4baf4d71"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522233 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522268 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522278 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522287 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522297 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/676c4fde-fa81-48be-82cf-c8aa4baf4d71-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.522306 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq6lf\" (UniqueName: \"kubernetes.io/projected/676c4fde-fa81-48be-82cf-c8aa4baf4d71-kube-api-access-wq6lf\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.527826 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.541214 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cb74df96-52qvx" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.542941 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.600893 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.601641 4879 scope.go:117] "RemoveContainer" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15" Nov 25 14:49:42 crc kubenswrapper[4879]: E1125 14:49:42.601913 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.604492 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.636711 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-5zhwv" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.644859 4879 scope.go:117] "RemoveContainer" 
containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.645271 4879 scope.go:117] "RemoveContainer" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.645607 4879 scope.go:117] "RemoveContainer" containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.645876 4879 scope.go:117] "RemoveContainer" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.689093 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.690633 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.710471 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.753956 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.768558 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.770457 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.787618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.805108 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.811436 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.858590 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.860115 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 25 14:49:42 crc kubenswrapper[4879]: I1125 14:49:42.877954 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.020872 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.086467 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.094567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" event={"ID":"676c4fde-fa81-48be-82cf-c8aa4baf4d71","Type":"ContainerDied","Data":"1bd6a9c3ef9188df32097726d505835403b28807ebedcf4950372597ccc0432d"} Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.094625 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf78879c9-fn6n8" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.094653 4879 scope.go:117] "RemoveContainer" containerID="1feca86685f4db2898b53b2d74a33b811cc2699d97b2d5d9c9274b9e6bd05285" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.095623 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.098375 4879 generic.go:334] "Generic (PLEG): container finished" podID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" containerID="fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22" exitCode=1 Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.098417 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerDied","Data":"fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22"} Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.099287 4879 scope.go:117] "RemoveContainer" containerID="fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22" Nov 25 14:49:43 crc kubenswrapper[4879]: E1125 14:49:43.099741 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.105675 4879 generic.go:334] "Generic (PLEG): container finished" podID="461b714a-4ee7-40ab-99d3-cd78552b52c6" containerID="47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca" exitCode=1 Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.105755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerDied","Data":"47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca"} Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.106447 4879 scope.go:117] "RemoveContainer" containerID="47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca" Nov 25 14:49:43 crc kubenswrapper[4879]: E1125 14:49:43.106718 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.109390 4879 generic.go:334] "Generic (PLEG): container finished" podID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" containerID="c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8" exitCode=1 Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.109455 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerDied","Data":"c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8"} Nov 25 14:49:43 
crc kubenswrapper[4879]: I1125 14:49:43.109944 4879 scope.go:117] "RemoveContainer" containerID="c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8" Nov 25 14:49:43 crc kubenswrapper[4879]: E1125 14:49:43.110246 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.112409 4879 generic.go:334] "Generic (PLEG): container finished" podID="279425db-228b-4697-864f-e50d2eb66012" containerID="0a2587a52f395489e31410f9b820f5562b35ac69a94bb20b37486f61d4b6891b" exitCode=1 Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.112458 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerDied","Data":"0a2587a52f395489e31410f9b820f5562b35ac69a94bb20b37486f61d4b6891b"} Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.113144 4879 scope.go:117] "RemoveContainer" containerID="0a2587a52f395489e31410f9b820f5562b35ac69a94bb20b37486f61d4b6891b" Nov 25 14:49:43 crc kubenswrapper[4879]: E1125 14:49:43.113395 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012)\"" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.121013 4879 scope.go:117] "RemoveContainer" containerID="4e39a3f09832916ea44593c7f26401450afbe98b669fa37611e1934efc9ad63b" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.143556 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.146617 4879 scope.go:117] "RemoveContainer" containerID="6c846f6656350305488011adce40a00422647c3aed16da522864d0972a963ad6" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.180243 4879 scope.go:117] "RemoveContainer" containerID="e2466ff8cfbcad76088ba39a668554e1de22a90f18cbe93a71e3d9874aa28ec9" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.196052 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.208558 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf78879c9-fn6n8"] Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.214410 4879 scope.go:117] "RemoveContainer" containerID="9363659f46e736ee8e9be00a4010d983a5e0301efcb80fd858137c372129b377" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.245521 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.288777 4879 scope.go:117] "RemoveContainer" containerID="24287c24a5ddb4f96c3c7d15e25abf188ce74daf56560d85eb3cd71f2ee7a82e" Nov 25 14:49:43 crc kubenswrapper[4879]: 
I1125 14:49:43.361366 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.371302 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.375374 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.432204 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.567206 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-45gtr" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.582959 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.600597 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.600720 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.646338 4879 scope.go:117] "RemoveContainer" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.646917 4879 scope.go:117] "RemoveContainer" containerID="b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.646992 4879 scope.go:117] "RemoveContainer" containerID="c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.647077 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.648538 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.650618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.666766 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" path="/var/lib/kubelet/pods/676c4fde-fa81-48be-82cf-c8aa4baf4d71/volumes" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.671887 4879 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.698940 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4wlql" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.728649 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.739388 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.762654 4879 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.768342 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.801224 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.841690 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.889045 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-v8tjg" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.889298 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.927253 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.937315 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 25 14:49:43 crc kubenswrapper[4879]: I1125 14:49:43.977344 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-n96r6" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.064923 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.104890 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-pxcqp" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.131113 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" containerID="315978420b79688ac7d29ea621445d8102ff41d347924d89037951f85cd8299f" exitCode=1 Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.131218 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerDied","Data":"315978420b79688ac7d29ea621445d8102ff41d347924d89037951f85cd8299f"} Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.131254 4879 scope.go:117] "RemoveContainer" containerID="c5f08e97e38f1cc5598b942874b91f367bdb778e78104ad375cd319685e95d98" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.131947 4879 scope.go:117] "RemoveContainer" containerID="315978420b79688ac7d29ea621445d8102ff41d347924d89037951f85cd8299f" Nov 25 14:49:44 crc kubenswrapper[4879]: E1125 14:49:44.132385 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=neutron-operator-controller-manager-7c57c8bbc4-7mc5t_openstack-operators(8f91f389-91ad-4a56-9e71-5cf7bb88db01)\"" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" podUID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.143494 4879 generic.go:334] "Generic (PLEG): container finished" podID="230849f3-daef-4f23-9839-8f0bd76d8e4a" 
containerID="fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006" exitCode=1 Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.143566 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerDied","Data":"fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006"} Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.144400 4879 scope.go:117] "RemoveContainer" containerID="fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006" Nov 25 14:49:44 crc kubenswrapper[4879]: E1125 14:49:44.144683 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.172636 4879 generic.go:334] "Generic (PLEG): container finished" podID="be0d238d-5b08-42e1-ac21-4e00592ab433" containerID="a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361" exitCode=1 Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.172766 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerDied","Data":"a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361"} Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.173645 4879 scope.go:117] "RemoveContainer" containerID="a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361" Nov 25 14:49:44 crc kubenswrapper[4879]: E1125 14:49:44.174784 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.188682 4879 generic.go:334] "Generic (PLEG): container finished" podID="06ee2ae7-d534-4170-9862-53a2580c39ce" containerID="d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8" exitCode=1 Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.188747 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerDied","Data":"d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8"} Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.190654 4879 scope.go:117] "RemoveContainer" containerID="d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8" Nov 25 14:49:44 crc kubenswrapper[4879]: E1125 14:49:44.190975 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce)\"" 
pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.199855 4879 scope.go:117] "RemoveContainer" containerID="99cc1887a4f2a4ec448a8373f101a34c6d7da963d7498a8073ce5b4c0045593f" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.264174 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.287577 4879 scope.go:117] "RemoveContainer" containerID="2ea418abeed6a8e6f435ee5e9cd222e7b70234e5d64bea23a51f16ddc7b634a9" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.291586 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.338745 4879 scope.go:117] "RemoveContainer" containerID="b313ab06e2be590c005396050e1d513fa828000a47c3f9069832dbf301a7fd07" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.482446 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.502727 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9h8lp" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.502785 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.543318 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.570314 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qffvr" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.588266 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.607916 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.633342 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.645846 4879 scope.go:117] "RemoveContainer" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.645973 4879 scope.go:117] "RemoveContainer" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.649291 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.659463 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.667163 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.684598 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 25 
14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.757682 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.825085 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.826581 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.832977 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-wztmv" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.891354 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-czl25" Nov 25 14:49:44 crc kubenswrapper[4879]: I1125 14:49:44.963485 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046040 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046409 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" containerName="placement-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046425 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" containerName="placement-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046444 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" containerName="cinder-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046451 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" containerName="cinder-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046482 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="dnsmasq-dns" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046489 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="dnsmasq-dns" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046503 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284aa011-0c93-49d5-a07e-4580b44f1cdc" containerName="neutron-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046521 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="284aa011-0c93-49d5-a07e-4580b44f1cdc" containerName="neutron-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046530 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" containerName="installer" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046535 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" containerName="installer" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046543 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="init" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046550 4879 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="init" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.046558 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" containerName="barbican-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046563 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" containerName="barbican-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046748 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" containerName="cinder-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046760 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="676c4fde-fa81-48be-82cf-c8aa4baf4d71" containerName="dnsmasq-dns" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046772 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" containerName="placement-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046783 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="211ea349-e87d-41a4-aa0e-e07b5cd25946" containerName="installer" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046793 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" containerName="barbican-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.046809 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="284aa011-0c93-49d5-a07e-4580b44f1cdc" containerName="neutron-db-sync" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.047662 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.050233 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.050557 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.050750 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.062922 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.065238 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.067180 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.089428 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.097086 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.102574 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173151 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173201 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173281 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173316 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173335 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xntt6\" (UniqueName: 
\"kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173384 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173406 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.173426 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.179738 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.202881 4879 generic.go:334] "Generic (PLEG): container finished" podID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" containerID="4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230" exitCode=1 Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.202956 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerDied","Data":"4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230"} Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.203006 4879 scope.go:117] "RemoveContainer" containerID="5dad50f880516e6426a4a8935fbd2087a7a2fe223bb889b1ba5c06bddc16014f" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.204192 4879 scope.go:117] "RemoveContainer" containerID="4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.204597 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.208173 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.211758 4879 generic.go:334] "Generic (PLEG): container finished" podID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2" exitCode=1 Nov 25 14:49:45 crc 
kubenswrapper[4879]: I1125 14:49:45.211821 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerDied","Data":"f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2"} Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.212415 4879 scope.go:117] "RemoveContainer" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.212679 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.214828 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.240945 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.270598 4879 scope.go:117] "RemoveContainer" containerID="6b1c29fbe900afaeb08c756e71449ee50dfd21c0e41cd1539848d9e6b9e866ef" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275106 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275170 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275232 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275268 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275284 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xntt6\" (UniqueName: \"kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275332 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275382 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275422 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.275683 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.276012 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.276422 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.282037 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.282177 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.283265 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.290653 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.294019 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xntt6\" (UniqueName: \"kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.298347 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.314764 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.366950 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.391246 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.411359 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.436288 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.455378 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-gmkt6" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.481641 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.484078 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.577280 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.616068 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.645607 4879 scope.go:117] "RemoveContainer" containerID="f36f5d6213d8ee5833e77ba4d7fcea05d1749b4fd1a16264412cefdf7474036b" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.645884 4879 scope.go:117] "RemoveContainer" containerID="6f0682d992b0612d28bb35e8c17e8ade55e3250b7cff5df1394e63b9464533e0" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.646364 4879 scope.go:117] "RemoveContainer" containerID="9c8ba8fc79c4ec42beec4874612f9bd965b28b8bf60579933f658c9c44802114" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.701612 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" 
Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.747241 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.749043 4879 log.go:32] "RunPodSandbox from runtime service failed" err=< Nov 25 14:49:45 crc kubenswrapper[4879]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_glance-default-internal-api-0_openstack_91e65be6-32db-4fc4-a6d8-c2cba036263e_0(a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296): error adding pod openstack_glance-default-internal-api-0 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296" Netns:"/var/run/netns/e36c30fb-e8ad-46d5-bf82-8fcfcfad2ecf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=glance-default-internal-api-0;K8S_POD_INFRA_CONTAINER_ID=a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296;K8S_POD_UID=91e65be6-32db-4fc4-a6d8-c2cba036263e" Path:"" ERRORED: error configuring pod [openstack/glance-default-internal-api-0] networking: [openstack/glance-default-internal-api-0/91e65be6-32db-4fc4-a6d8-c2cba036263e:ovn-kubernetes]: error adding container to network "ovn-kubernetes": CNI request failed with status 400: '[openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] [openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] pod deleted before sandbox ADD operation began Nov 25 14:49:45 crc kubenswrapper[4879]: ' Nov 25 14:49:45 crc kubenswrapper[4879]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Nov 25 14:49:45 crc kubenswrapper[4879]: > Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.749115 4879 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Nov 25 14:49:45 crc kubenswrapper[4879]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_glance-default-internal-api-0_openstack_91e65be6-32db-4fc4-a6d8-c2cba036263e_0(a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296): error adding pod openstack_glance-default-internal-api-0 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296" Netns:"/var/run/netns/e36c30fb-e8ad-46d5-bf82-8fcfcfad2ecf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=glance-default-internal-api-0;K8S_POD_INFRA_CONTAINER_ID=a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296;K8S_POD_UID=91e65be6-32db-4fc4-a6d8-c2cba036263e" Path:"" ERRORED: error configuring pod [openstack/glance-default-internal-api-0] networking: [openstack/glance-default-internal-api-0/91e65be6-32db-4fc4-a6d8-c2cba036263e:ovn-kubernetes]: error adding container to network "ovn-kubernetes": CNI request 
failed with status 400: '[openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] [openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] pod deleted before sandbox ADD operation began Nov 25 14:49:45 crc kubenswrapper[4879]: ' Nov 25 14:49:45 crc kubenswrapper[4879]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Nov 25 14:49:45 crc kubenswrapper[4879]: > pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.749153 4879 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Nov 25 14:49:45 crc kubenswrapper[4879]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_glance-default-internal-api-0_openstack_91e65be6-32db-4fc4-a6d8-c2cba036263e_0(a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296): error adding pod openstack_glance-default-internal-api-0 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296" Netns:"/var/run/netns/e36c30fb-e8ad-46d5-bf82-8fcfcfad2ecf" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=glance-default-internal-api-0;K8S_POD_INFRA_CONTAINER_ID=a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296;K8S_POD_UID=91e65be6-32db-4fc4-a6d8-c2cba036263e" Path:"" ERRORED: error configuring pod [openstack/glance-default-internal-api-0] networking: [openstack/glance-default-internal-api-0/91e65be6-32db-4fc4-a6d8-c2cba036263e:ovn-kubernetes]: error adding container to network "ovn-kubernetes": CNI request failed with status 400: '[openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] [openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] pod deleted before sandbox ADD operation began Nov 25 14:49:45 crc kubenswrapper[4879]: ' Nov 25 14:49:45 crc kubenswrapper[4879]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Nov 25 14:49:45 crc kubenswrapper[4879]: > pod="openstack/glance-default-internal-api-0" Nov 25 14:49:45 crc kubenswrapper[4879]: E1125 14:49:45.749211 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"glance-default-internal-api-0_openstack(91e65be6-32db-4fc4-a6d8-c2cba036263e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"glance-default-internal-api-0_openstack(91e65be6-32db-4fc4-a6d8-c2cba036263e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox 
k8s_glance-default-internal-api-0_openstack_91e65be6-32db-4fc4-a6d8-c2cba036263e_0(a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296): error adding pod openstack_glance-default-internal-api-0 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296\\\" Netns:\\\"/var/run/netns/e36c30fb-e8ad-46d5-bf82-8fcfcfad2ecf\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=glance-default-internal-api-0;K8S_POD_INFRA_CONTAINER_ID=a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296;K8S_POD_UID=91e65be6-32db-4fc4-a6d8-c2cba036263e\\\" Path:\\\"\\\" ERRORED: error configuring pod [openstack/glance-default-internal-api-0] networking: [openstack/glance-default-internal-api-0/91e65be6-32db-4fc4-a6d8-c2cba036263e:ovn-kubernetes]: error adding container to network \\\"ovn-kubernetes\\\": CNI request failed with status 400: '[openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] [openstack/glance-default-internal-api-0 a8fefb3577e250be78a5648782511202772ea85b211309f5f9e468189e4c1296 network default NAD default] pod deleted before sandbox ADD operation began\\n'\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openstack/glance-default-internal-api-0" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.911983 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 25 14:49:45 crc kubenswrapper[4879]: I1125 14:49:45.953899 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:45.971369 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.003771 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-gtl9r" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.011657 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-d8nb7" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.013895 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.073913 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.085014 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 
14:49:46.144671 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.184299 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-h8hjg" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.213617 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.215437 4879 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.215654 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4" gracePeriod=5 Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.230305 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" event={"ID":"81a1e752-3477-4e08-b151-874b0e503a1b","Type":"ContainerStarted","Data":"76655ca29150edecadd973a816be259fd127bb68de220651e1223a27cec0b5c8"} Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.231541 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.237599 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" event={"ID":"94c3a712-5baa-4789-ad81-8d4c0554d84b","Type":"ContainerStarted","Data":"a837ce7167e3ca18035a79de8bd07cd87a4abc5ffad4c5e5274a8ada4cd6649a"} Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.238404 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.241255 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" event={"ID":"89a61837-ab76-494d-a98d-268fed9bbe35","Type":"ContainerStarted","Data":"7d3365bfa65fcd9911a0ba7d9c94db864ffcc9f6696e75facc3985dd8ae09f02"} Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.241861 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.321278 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.336236 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.349840 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.370092 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 
14:49:46.391790 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.429466 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.504916 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.627701 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.644463 4879 scope.go:117] "RemoveContainer" containerID="6d431eef944df702884f7ed955fe4b97ef32e7592aaab672e205eb555dd98d3c" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.645185 4879 scope.go:117] "RemoveContainer" containerID="fbbaa211666e6df36d932f06ee802e24a000cabc386e214f346a3e772322281a" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.645703 4879 scope.go:117] "RemoveContainer" containerID="0803087feda622bfea22aa45093484b99813743a567b0d466a022566d1770951" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.646157 4879 scope.go:117] "RemoveContainer" containerID="9a0020e247361f5ecb1fd10f69c56758427e9d274e94efb04979e9c31709228b" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.646644 4879 scope.go:117] "RemoveContainer" containerID="a94c2b349f2fe2de67a17d20079279fa2ec40c16e8f87f22ae674ffa110f2a37" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.715889 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.732208 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.736873 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.801286 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.836257 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.858010 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.908745 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 25 14:49:46 crc kubenswrapper[4879]: I1125 14:49:46.961894 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6tvg8" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.030745 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.095500 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-nthgs" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.102598 4879 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.139106 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.140609 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.156031 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.160945 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.188655 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.272723 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.323979 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-qssh2" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.348353 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.355478 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fnz84" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.443313 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.461678 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.507395 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.509564 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-tkcc8" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.516940 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.595527 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.617348 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-fks6b" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.649260 4879 scope.go:117] "RemoveContainer" containerID="cd206d49d5c18f3ffb366c319d925bac14896315238ab6e2f35b773c107973ab" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.690558 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.718041 4879 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.737579 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-q77vv" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.830317 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.844931 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.885140 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.890060 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.974365 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 25 14:49:47 crc kubenswrapper[4879]: I1125 14:49:47.975835 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.106401 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.106576 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.113017 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.160280 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.163528 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.204728 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.220887 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.221056 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.239413 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.261317 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5t2l2" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.264780 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.276975 4879 generic.go:334] "Generic (PLEG): container 
finished" podID="3b39b6d7-9d19-4602-ad6e-c59a531818a8" containerID="d4d89736cc943bb84edda7ac4b7eae64d0233104c931c5237f67e0440f2c10a8" exitCode=1 Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.277059 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" event={"ID":"3b39b6d7-9d19-4602-ad6e-c59a531818a8","Type":"ContainerDied","Data":"d4d89736cc943bb84edda7ac4b7eae64d0233104c931c5237f67e0440f2c10a8"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.277700 4879 scope.go:117] "RemoveContainer" containerID="d4d89736cc943bb84edda7ac4b7eae64d0233104c931c5237f67e0440f2c10a8" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.284452 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" event={"ID":"c5c5776f-3970-425a-b5a7-c4c859f821e0","Type":"ContainerStarted","Data":"5d04629096cba7e4fe012b29911be8ac62b518610b03b4ca95cc029c4d2f1910"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.284705 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.290954 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" event={"ID":"2037b3b9-3099-4f88-8e56-ec28ee25efa5","Type":"ContainerStarted","Data":"cb9bf20b086400e4aa572d7899611bbeb0c993eadfb18c3d244e3b3b436ca1c6"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.291202 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.292160 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.292387 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.293771 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" event={"ID":"afbf9f55-3316-40bb-b53b-d4d96482f9d5","Type":"ContainerStarted","Data":"99eaf9639f4596cb06daf182c11b3ce978d856290dae8a22a235c8827fec0813"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.295567 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.306099 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" event={"ID":"cefe5024-a03a-427e-84a5-a4f6eac64f12","Type":"ContainerStarted","Data":"30f31c9bd48d59b08ac40e97eb0a61783046a4f109faff8baef6e10194c28d73"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.306515 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.307918 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.311881 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" event={"ID":"5c199b9d-786f-4520-a7bb-67f616b16b88","Type":"ContainerStarted","Data":"ad90724a012102c90d468122292baf2e810efe3e399a8179e27b0add1c16e88a"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.312835 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.319495 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" event={"ID":"4ca6d024-306e-4707-abb0-1b57ed1e11b6","Type":"ContainerStarted","Data":"91fbf7c3e6e2d6236ab738e54ddce8d54875d27f4c8eb47b5e9257d2604b765f"} Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.320252 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.422419 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.441331 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.496618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.534930 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.535192 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.556812 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.758863 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-cn7hl" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.776832 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.811947 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.812080 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.812752 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:48 crc kubenswrapper[4879]: I1125 14:49:48.842036 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.036543 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.060981 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.067296 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.088581 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.089285 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.128601 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.239374 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.255524 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.256066 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.264507 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.329385 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-bf2x5" event={"ID":"3b39b6d7-9d19-4602-ad6e-c59a531818a8","Type":"ContainerStarted","Data":"ba250e609e872cd7c13e61c405f1cded102fed7876d3241accf2cb95c2d51455"} Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.421342 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.468808 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.539834 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.640960 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.660930 4879 scope.go:117] "RemoveContainer" containerID="33b75d3d991f21a884351432091d48418cb58acce93d56c4ce99e2bca63bae73" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.667391 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.685881 4879 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.774093 4879 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.786800 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.873792 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 25 14:49:49 crc kubenswrapper[4879]: I1125 14:49:49.956870 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-h5stt" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.016465 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.078224 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.098087 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.100974 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-txg5q" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.112485 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.275711 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.312391 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.337700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" event={"ID":"b7d6b37e-0aff-4496-b240-7770d1d23827","Type":"ContainerStarted","Data":"1f822b13c077f54de3809b97f5dc6e29fbad305a30f440aec7aaa022c5f9b5d9"} Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.338957 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.342365 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerStarted","Data":"7df6e0d385dcb716d1fece062894b107c5b928ec549d6c8fdeb4047821e62057"} Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.420792 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.501606 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.711433 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 
25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.712049 4879 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-nxpcm" Nov 25 14:49:50 crc kubenswrapper[4879]: I1125 14:49:50.906727 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.093275 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.197313 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.277170 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.327106 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" podUID="2df9d6c8-3f76-4f2d-b7ff-c4874549babd" containerName="cert-manager-controller" probeResult="failure" output="Get \"http://10.217.0.70:9403/livez\": dial tcp 10.217.0.70:9403: connect: connection refused" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.341414 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.353109 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerStarted","Data":"75fffef0ceaa93e38bd59fcdeb4600d29283f324dbf62e1fe78cd408a68c4581"} Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.353228 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerStarted","Data":"da67a389b74431ca72138a70f07b00255a1ee909f216b0cbf4c75bb7fc5b974d"} Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.354779 4879 generic.go:334] "Generic (PLEG): container finished" podID="2df9d6c8-3f76-4f2d-b7ff-c4874549babd" containerID="7bc07413625658bd1bc2f69508dbf81902d43a663017c4cbe509d4aae8decc02" exitCode=1 Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.354856 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" event={"ID":"2df9d6c8-3f76-4f2d-b7ff-c4874549babd","Type":"ContainerDied","Data":"7bc07413625658bd1bc2f69508dbf81902d43a663017c4cbe509d4aae8decc02"} Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.355518 4879 scope.go:117] "RemoveContainer" containerID="7bc07413625658bd1bc2f69508dbf81902d43a663017c4cbe509d4aae8decc02" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.408884 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.419025 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=10.419008725 podStartE2EDuration="10.419008725s" podCreationTimestamp="2025-11-25 14:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:49:51.417505174 +0000 UTC m=+1483.020918245" watchObservedRunningTime="2025-11-25 
14:49:51.419008725 +0000 UTC m=+1483.022421786" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.487305 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.695365 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.696385 4879 scope.go:117] "RemoveContainer" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.696646 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.697419 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.721757 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.721809 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.722539 4879 scope.go:117] "RemoveContainer" containerID="fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.722790 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.751455 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.752314 4879 scope.go:117] "RemoveContainer" containerID="47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.752602 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.752814 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:49:51 crc 
kubenswrapper[4879]: I1125 14:49:51.773514 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.774597 4879 scope.go:117] "RemoveContainer" containerID="fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.774946 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.775064 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.788423 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.788946 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.789382 4879 scope.go:117] "RemoveContainer" containerID="c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.789663 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.791289 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bl9x8" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.837555 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.837827 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.838586 4879 scope.go:117] "RemoveContainer" containerID="a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.839718 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.960329 4879 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.993286 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.993329 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.993982 4879 scope.go:117] "RemoveContainer" containerID="4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230" Nov 25 14:49:51 crc kubenswrapper[4879]: E1125 14:49:51.994225 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.994841 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 25 14:49:51 crc kubenswrapper[4879]: I1125 14:49:51.994964 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.014320 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.014382 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.014925 4879 scope.go:117] "RemoveContainer" containerID="0a2587a52f395489e31410f9b820f5562b35ac69a94bb20b37486f61d4b6891b" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.015212 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=mariadb-operator-controller-manager-cb6c4fdb7-95pqd_openstack-operators(279425db-228b-4697-864f-e50d2eb66012)\"" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" podUID="279425db-228b-4697-864f-e50d2eb66012" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.113863 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.114292 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.115468 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116141 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116295 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116377 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116426 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116552 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.116589 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.117286 4879 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.117410 4879 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.117503 4879 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.117590 4879 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.126017 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.131166 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.131233 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.132010 4879 scope.go:117] "RemoveContainer" containerID="315978420b79688ac7d29ea621445d8102ff41d347924d89037951f85cd8299f" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.132328 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=neutron-operator-controller-manager-7c57c8bbc4-7mc5t_openstack-operators(8f91f389-91ad-4a56-9e71-5cf7bb88db01)\"" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" podUID="8f91f389-91ad-4a56-9e71-5cf7bb88db01" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.144278 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-mq9fj" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.161679 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.220324 4879 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.265779 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-pr6k5" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.286234 4879 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-5rjs9" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.354941 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-nk7tw" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.366050 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-z6dzl" event={"ID":"2df9d6c8-3f76-4f2d-b7ff-c4874549babd","Type":"ContainerStarted","Data":"627787c7b0ae6a0d899f14e97c4282392c770537882b686ff72d34976590e614"} Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.370101 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.370173 4879 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4" exitCode=137 Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.370800 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.370883 4879 scope.go:117] "RemoveContainer" containerID="19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.370985 4879 scope.go:117] "RemoveContainer" containerID="a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.371459 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=ironic-operator-controller-manager-5bfcdc958c-8tgzv_openstack-operators(be0d238d-5b08-42e1-ac21-4e00592ab433)\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" podUID="be0d238d-5b08-42e1-ac21-4e00592ab433" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.372220 4879 scope.go:117] "RemoveContainer" containerID="c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.372536 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=glance-operator-controller-manager-68b95954c9-gb5r4_openstack-operators(e9e99ec6-68ec-4d48-847b-b5f350dc1fc4)\"" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" podUID="e9e99ec6-68ec-4d48-847b-b5f350dc1fc4" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.373647 4879 scope.go:117] "RemoveContainer" containerID="47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.373874 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=cinder-operator-controller-manager-79856dc55c-sl6x7_openstack-operators(461b714a-4ee7-40ab-99d3-cd78552b52c6)\"" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" podUID="461b714a-4ee7-40ab-99d3-cd78552b52c6" Nov 25 
14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.374095 4879 scope.go:117] "RemoveContainer" containerID="fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.374331 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=heat-operator-controller-manager-774b86978c-w8cc5_openstack-operators(0c0a5e22-8150-48b6-9b4f-a9b18bb4960f)\"" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" podUID="0c0a5e22-8150-48b6-9b4f-a9b18bb4960f" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.374486 4879 scope.go:117] "RemoveContainer" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.374760 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.387787 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-pf28x" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.392100 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-zvb94" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.401871 4879 scope.go:117] "RemoveContainer" containerID="19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.402637 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4\": container with ID starting with 19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4 not found: ID does not exist" containerID="19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.402698 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4"} err="failed to get container status \"19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4\": rpc error: code = NotFound desc = could not find container \"19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4\": container with ID starting with 19c40030c341d5d40e27664cafbd1aa5b697fc6d2d2ba45776223056e986f9b4 not found: ID does not exist" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.479794 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.495565 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-8tdb9" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.584622 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.584699 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" Nov 25 14:49:52 crc kubenswrapper[4879]: I1125 14:49:52.585473 4879 scope.go:117] "RemoveContainer" containerID="d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8" Nov 25 14:49:52 crc kubenswrapper[4879]: E1125 14:49:52.585745 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce)\"" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.381228 4879 scope.go:117] "RemoveContainer" containerID="d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8" Nov 25 14:49:53 crc kubenswrapper[4879]: E1125 14:49:53.381500 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=watcher-operator-controller-manager-864885998-vnr67_openstack-operators(06ee2ae7-d534-4170-9862-53a2580c39ce)\"" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" podUID="06ee2ae7-d534-4170-9862-53a2580c39ce" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.644746 4879 scope.go:117] "RemoveContainer" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15" Nov 25 14:49:53 crc kubenswrapper[4879]: E1125 14:49:53.645008 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=metallb-operator-controller-manager-7965d46465-b9w8p_metallb-system(1bf9f1b8-1476-4f3a-963b-986a0ae66426)\"" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" podUID="1bf9f1b8-1476-4f3a-963b-986a0ae66426" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.655794 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.656072 4879 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.672594 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.672627 4879 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="f933ed72-9de8-4954-a43b-94572ad4de01" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.682329 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.682377 4879 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
mirrorPodUID="f933ed72-9de8-4954-a43b-94572ad4de01" Nov 25 14:49:53 crc kubenswrapper[4879]: I1125 14:49:53.707565 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 25 14:49:54 crc kubenswrapper[4879]: I1125 14:49:54.644963 4879 scope.go:117] "RemoveContainer" containerID="f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074" Nov 25 14:49:54 crc kubenswrapper[4879]: E1125 14:49:54.645432 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=operator pod=rabbitmq-cluster-operator-manager-668c99d594-zj2cr_openstack-operators(ee61acb4-f03b-4e5c-996c-3b4436b8e676)\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" podUID="ee61acb4-f03b-4e5c-996c-3b4436b8e676" Nov 25 14:49:54 crc kubenswrapper[4879]: I1125 14:49:54.663492 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.368173 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.368595 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.403845 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.410094 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerStarted","Data":"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c"} Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.410164 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.410312 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 25 14:49:55 crc kubenswrapper[4879]: I1125 14:49:55.414478 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:56 crc kubenswrapper[4879]: I1125 14:49:56.261037 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-77bf44fb75-xdhlf" Nov 25 14:49:56 crc kubenswrapper[4879]: I1125 14:49:56.425473 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:58 crc kubenswrapper[4879]: I1125 14:49:58.567892 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 14:49:58 crc kubenswrapper[4879]: I1125 14:49:58.568298 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:49:58 crc kubenswrapper[4879]: I1125 14:49:58.569012 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:00 crc kubenswrapper[4879]: I1125 14:50:00.910543 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 25 14:50:01 
crc kubenswrapper[4879]: I1125 14:50:01.803514 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-ltnmm" Nov 25 14:50:02 crc kubenswrapper[4879]: I1125 14:50:02.155598 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-txbsq" Nov 25 14:50:02 crc kubenswrapper[4879]: I1125 14:50:02.645203 4879 scope.go:117] "RemoveContainer" containerID="4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230" Nov 25 14:50:02 crc kubenswrapper[4879]: E1125 14:50:02.645455 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=manila-operator-controller-manager-58bb8d67cc-m7d57_openstack-operators(bac13c1a-af96-4cb0-a802-ef2086f9f06b)\"" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" podUID="bac13c1a-af96-4cb0-a802-ef2086f9f06b" Nov 25 14:50:03 crc kubenswrapper[4879]: I1125 14:50:03.644523 4879 scope.go:117] "RemoveContainer" containerID="fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006" Nov 25 14:50:03 crc kubenswrapper[4879]: E1125 14:50:03.644886 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=designate-operator-controller-manager-7d695c9b56-4z42w_openstack-operators(230849f3-daef-4f23-9839-8f0bd76d8e4a)\"" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" podUID="230849f3-daef-4f23-9839-8f0bd76d8e4a" Nov 25 14:50:03 crc kubenswrapper[4879]: I1125 14:50:03.645645 4879 scope.go:117] "RemoveContainer" containerID="c180e2b54ea469bf523a7fd603a35b6a819eba05ac0092fad01e0b1295e53af8" Nov 25 14:50:03 crc kubenswrapper[4879]: I1125 14:50:03.645778 4879 scope.go:117] "RemoveContainer" containerID="fb6423a6400a36b8cf279794eeac07107a047c58761d76b09b14f15acd9bfe22" Nov 25 14:50:03 crc kubenswrapper[4879]: I1125 14:50:03.645949 4879 scope.go:117] "RemoveContainer" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2" Nov 25 14:50:03 crc kubenswrapper[4879]: E1125 14:50:03.646292 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=barbican-operator-controller-manager-86dc4d89c8-vf4mk_openstack-operators(a8b8b0f7-f988-46b1-b88f-751261b1c6a1)\"" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" podUID="a8b8b0f7-f988-46b1-b88f-751261b1c6a1" Nov 25 14:50:04 crc kubenswrapper[4879]: I1125 14:50:04.494483 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" event={"ID":"0c0a5e22-8150-48b6-9b4f-a9b18bb4960f","Type":"ContainerStarted","Data":"0a4758e954582655e96fd682329de68a79445908fca1cb3a3d005dcd50dea13d"} Nov 25 14:50:04 crc kubenswrapper[4879]: I1125 14:50:04.495277 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5" Nov 25 14:50:04 crc kubenswrapper[4879]: I1125 14:50:04.497329 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" 
event={"ID":"e9e99ec6-68ec-4d48-847b-b5f350dc1fc4","Type":"ContainerStarted","Data":"855a85a8104016462e0fc2160ae2feb7031ec35adb6a03daf1564e8696885da7"} Nov 25 14:50:04 crc kubenswrapper[4879]: I1125 14:50:04.498066 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4" Nov 25 14:50:04 crc kubenswrapper[4879]: I1125 14:50:04.644355 4879 scope.go:117] "RemoveContainer" containerID="0a2587a52f395489e31410f9b820f5562b35ac69a94bb20b37486f61d4b6891b" Nov 25 14:50:05 crc kubenswrapper[4879]: I1125 14:50:05.510281 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" event={"ID":"279425db-228b-4697-864f-e50d2eb66012","Type":"ContainerStarted","Data":"c3f62996f5eb1cfb6add1170ecf374882f7ada0eb5d91d2cd814b6e8bba140c8"} Nov 25 14:50:05 crc kubenswrapper[4879]: I1125 14:50:05.511208 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd" Nov 25 14:50:05 crc kubenswrapper[4879]: I1125 14:50:05.644313 4879 scope.go:117] "RemoveContainer" containerID="315978420b79688ac7d29ea621445d8102ff41d347924d89037951f85cd8299f" Nov 25 14:50:06 crc kubenswrapper[4879]: I1125 14:50:06.522652 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" event={"ID":"8f91f389-91ad-4a56-9e71-5cf7bb88db01","Type":"ContainerStarted","Data":"58f3cdc1e6d37718a1512c8d363da17c106b76783b6da53671212d065be9e156"} Nov 25 14:50:06 crc kubenswrapper[4879]: I1125 14:50:06.523262 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t" Nov 25 14:50:06 crc kubenswrapper[4879]: I1125 14:50:06.645255 4879 scope.go:117] "RemoveContainer" containerID="a9beac7b90924a6f4456204338f4ceb6c0e3ab40accdee6fa9866ce92e348361" Nov 25 14:50:06 crc kubenswrapper[4879]: I1125 14:50:06.646630 4879 scope.go:117] "RemoveContainer" containerID="47fc7bcf3bc98656a9057f632cfcb6a6ea1c97bc7fb00acc875c805a97b3adca" Nov 25 14:50:07 crc kubenswrapper[4879]: I1125 14:50:07.533811 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" event={"ID":"be0d238d-5b08-42e1-ac21-4e00592ab433","Type":"ContainerStarted","Data":"b2613cb4a8e881f96122b3fa0159540fb5ae47d327bc46137ff496cf38928efc"} Nov 25 14:50:07 crc kubenswrapper[4879]: I1125 14:50:07.534356 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv" Nov 25 14:50:07 crc kubenswrapper[4879]: I1125 14:50:07.537076 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" event={"ID":"461b714a-4ee7-40ab-99d3-cd78552b52c6","Type":"ContainerStarted","Data":"4990d8d034f4392790736257d2352364cb7086d926f1aebf853023502a961d99"} Nov 25 14:50:07 crc kubenswrapper[4879]: I1125 14:50:07.537354 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7" Nov 25 14:50:07 crc kubenswrapper[4879]: I1125 14:50:07.647282 4879 scope.go:117] "RemoveContainer" containerID="f5c568c948d9bffa9ddd89e9d74829b576b8ddb84a8ed49ab2db8b3228541074" Nov 25 14:50:08 crc kubenswrapper[4879]: I1125 14:50:08.548666 
4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zj2cr" event={"ID":"ee61acb4-f03b-4e5c-996c-3b4436b8e676","Type":"ContainerStarted","Data":"fbe2670c509e68ac3021f3087384bbd65fc37907b7da2f2a6439c53e11e69410"}
Nov 25 14:50:08 crc kubenswrapper[4879]: I1125 14:50:08.644819 4879 scope.go:117] "RemoveContainer" containerID="d61523e98f0c1bf85398f2be6349e8b5cd81cabe101a1b8141a1e88cb3030bc8"
Nov 25 14:50:08 crc kubenswrapper[4879]: I1125 14:50:08.645297 4879 scope.go:117] "RemoveContainer" containerID="531bea3162c03078afd8c856ff5bad7a275de96d7fb3e62e1970942af638ce15"
Nov 25 14:50:09 crc kubenswrapper[4879]: I1125 14:50:09.560871 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" event={"ID":"1bf9f1b8-1476-4f3a-963b-986a0ae66426","Type":"ContainerStarted","Data":"cb041e999854999b7a9c9bc0e9396ddb1fa30537074371ade4d14d53c0097d6e"}
Nov 25 14:50:09 crc kubenswrapper[4879]: I1125 14:50:09.561408 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p"
Nov 25 14:50:09 crc kubenswrapper[4879]: I1125 14:50:09.565240 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67" event={"ID":"06ee2ae7-d534-4170-9862-53a2580c39ce","Type":"ContainerStarted","Data":"2fb1e2cc24219d7287ea0bb2b1a160b24f3b8c82705afd76e4728aaaf1a1fa08"}
Nov 25 14:50:09 crc kubenswrapper[4879]: I1125 14:50:09.565535 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67"
Nov 25 14:50:11 crc kubenswrapper[4879]: I1125 14:50:11.753590 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-sl6x7"
Nov 25 14:50:11 crc kubenswrapper[4879]: I1125 14:50:11.774727 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-774b86978c-w8cc5"
Nov 25 14:50:11 crc kubenswrapper[4879]: I1125 14:50:11.799921 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-gb5r4"
Nov 25 14:50:11 crc kubenswrapper[4879]: I1125 14:50:11.839212 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-8tgzv"
Nov 25 14:50:12 crc kubenswrapper[4879]: I1125 14:50:12.017449 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-95pqd"
Nov 25 14:50:12 crc kubenswrapper[4879]: I1125 14:50:12.133160 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-7mc5t"
Nov 25 14:50:14 crc kubenswrapper[4879]: I1125 14:50:14.645004 4879 scope.go:117] "RemoveContainer" containerID="fe27271c9733ce8344e4d95ce86a766a52f0e5f73ccacc4e8abf22394af74006"
Nov 25 14:50:15 crc kubenswrapper[4879]: I1125 14:50:15.616320 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w" event={"ID":"230849f3-daef-4f23-9839-8f0bd76d8e4a","Type":"ContainerStarted","Data":"84b408e3ace7085594c64e43a529e5a2851ca18b6448a420f95b82a80b322922"}
Nov 25 14:50:15 crc kubenswrapper[4879]: I1125 14:50:15.616825 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w"
Nov 25 14:50:17 crc kubenswrapper[4879]: I1125 14:50:17.651024 4879 scope.go:117] "RemoveContainer" containerID="4fe022349c5fa8bdaf90ede1af0ed3dbbe29d3918a0e521b2b9ea2b62cb3d230"
Nov 25 14:50:17 crc kubenswrapper[4879]: I1125 14:50:17.651850 4879 scope.go:117] "RemoveContainer" containerID="f49ec0ddc6a5bd0f7f3a4865d69b166a3d29d8fbcc81a2b5bd96c30b0af9add2"
Nov 25 14:50:19 crc kubenswrapper[4879]: I1125 14:50:19.342316 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" event={"ID":"bac13c1a-af96-4cb0-a802-ef2086f9f06b","Type":"ContainerStarted","Data":"443d90f1b1c2262c708c395b19955cd4d0d232b46795494c69d07e051194dc9a"}
Nov 25 14:50:19 crc kubenswrapper[4879]: I1125 14:50:19.345137 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57"
Nov 25 14:50:19 crc kubenswrapper[4879]: I1125 14:50:19.349376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk" event={"ID":"a8b8b0f7-f988-46b1-b88f-751261b1c6a1","Type":"ContainerStarted","Data":"0d81c831fbe3d5bb3b04621d62d44e1aa510ad149cbcf3161d9f05c24b4b346f"}
Nov 25 14:50:19 crc kubenswrapper[4879]: I1125 14:50:19.349750 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk"
Nov 25 14:50:21 crc kubenswrapper[4879]: I1125 14:50:21.721757 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-4z42w"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.570006 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-94fd8ccd4-vp796"]
Nov 25 14:50:22 crc kubenswrapper[4879]: E1125 14:50:22.570797 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.570817 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.571016 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.572063 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.579505 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.580164 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.580391 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vjd28"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.580691 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.581676 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.586480 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-864885998-vnr67"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.589630 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-94fd8ccd4-vp796"]
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679563 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679625 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvfkw\" (UniqueName: \"kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679688 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679713 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679741 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.679851 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781383 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvfkw\" (UniqueName: \"kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781542 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781560 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781584 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.781911 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.782475 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.788523 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.788909 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.790707 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.794625 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.795396 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.801583 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvfkw\" (UniqueName: \"kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw\") pod \"placement-94fd8ccd4-vp796\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.830836 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.831155 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-central-agent" containerID="cri-o://963285dbb567e3fb50840338ce65066a0f57286a081eee1e4fde35ff6d3a7ae7" gracePeriod=30
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.831225 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="sg-core" containerID="cri-o://e71cfe4c8214f91984fbd214b7b60fe3f8777249c064f99b0f5735128c0af738" gracePeriod=30
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.831356 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-notification-agent" containerID="cri-o://4b3e2f1a92c3ec318d8860400eed136cf021710c1bf69b80c09ebe60b3bd9674" gracePeriod=30
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.831225 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="proxy-httpd" containerID="cri-o://6ccf158ca17dbc7206f394a97a3f6e39c2fdb7c9caace41a3e5e0fd60578c244" gracePeriod=30
Nov 25 14:50:22 crc kubenswrapper[4879]: I1125 14:50:22.900596 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:23 crc kubenswrapper[4879]: I1125 14:50:23.388783 4879 generic.go:334] "Generic (PLEG): container finished" podID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerID="6ccf158ca17dbc7206f394a97a3f6e39c2fdb7c9caace41a3e5e0fd60578c244" exitCode=0
Nov 25 14:50:23 crc kubenswrapper[4879]: I1125 14:50:23.388819 4879 generic.go:334] "Generic (PLEG): container finished" podID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerID="e71cfe4c8214f91984fbd214b7b60fe3f8777249c064f99b0f5735128c0af738" exitCode=2
Nov 25 14:50:23 crc kubenswrapper[4879]: I1125 14:50:23.388843 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerDied","Data":"6ccf158ca17dbc7206f394a97a3f6e39c2fdb7c9caace41a3e5e0fd60578c244"}
Nov 25 14:50:23 crc kubenswrapper[4879]: I1125 14:50:23.388875 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerDied","Data":"e71cfe4c8214f91984fbd214b7b60fe3f8777249c064f99b0f5735128c0af738"}
Nov 25 14:50:23 crc kubenswrapper[4879]: W1125 14:50:23.482914 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73963218_ce5b_4813_8224_27ad7b69d0b3.slice/crio-1fef617d7cf95a614317c9186fe0add4c2f7399383016661beea0542d9d7dd2d WatchSource:0}: Error finding container 1fef617d7cf95a614317c9186fe0add4c2f7399383016661beea0542d9d7dd2d: Status 404 returned error can't find the container with id 1fef617d7cf95a614317c9186fe0add4c2f7399383016661beea0542d9d7dd2d
Nov 25 14:50:23 crc kubenswrapper[4879]: I1125 14:50:23.483494 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-94fd8ccd4-vp796"]
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.401474 4879 generic.go:334] "Generic (PLEG): container finished" podID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerID="963285dbb567e3fb50840338ce65066a0f57286a081eee1e4fde35ff6d3a7ae7" exitCode=0
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.401516 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerDied","Data":"963285dbb567e3fb50840338ce65066a0f57286a081eee1e4fde35ff6d3a7ae7"}
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.406153 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerStarted","Data":"9c3fd5bfe1c4a686adda7380350f15818f581370513fbd5cb85f18e67e6f730e"}
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.406205 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerStarted","Data":"1b6bde64e67dfaaeb2ea7d1af1b7e358481fefd8e638d1c70034361c14cd5032"}
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.406219 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerStarted","Data":"1fef617d7cf95a614317c9186fe0add4c2f7399383016661beea0542d9d7dd2d"}
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.406326 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.455414 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"]
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.459937 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.459906 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-94fd8ccd4-vp796" podStartSLOduration=2.459881146 podStartE2EDuration="2.459881146s" podCreationTimestamp="2025-11-25 14:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:24.446464116 +0000 UTC m=+1516.049877197" watchObservedRunningTime="2025-11-25 14:50:24.459881146 +0000 UTC m=+1516.063294217"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.478458 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.478563 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.486200 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.505322 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"]
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.622677 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.622993 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623171 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623373 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623516 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5rfv\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623628 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623742 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.623848 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725527 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725605 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725642 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725705 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725776 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725850 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725884 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5rfv\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.725911 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.726864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.727250 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.733246 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.733914 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.734537 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.735551 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.737236 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.759731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5rfv\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv\") pod \"swift-proxy-6c8985d949-st5vd\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:24 crc kubenswrapper[4879]: I1125 14:50:24.808438 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:25 crc kubenswrapper[4879]: I1125 14:50:25.226498 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"]
Nov 25 14:50:25 crc kubenswrapper[4879]: W1125 14:50:25.237615 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod552e169f_1bf1_4d0b_802a_da9720c6a35d.slice/crio-4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a WatchSource:0}: Error finding container 4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a: Status 404 returned error can't find the container with id 4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a
Nov 25 14:50:25 crc kubenswrapper[4879]: I1125 14:50:25.416546 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerStarted","Data":"4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a"}
Nov 25 14:50:25 crc kubenswrapper[4879]: I1125 14:50:25.417412 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-94fd8ccd4-vp796"
Nov 25 14:50:26 crc kubenswrapper[4879]: I1125 14:50:26.428010 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerStarted","Data":"1f411d7aa7397b9a94d385fe775d63a5aaf0a0e6eba3463194e645675a3ae6cf"}
Nov 25 14:50:26 crc kubenswrapper[4879]: I1125 14:50:26.428661 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:26 crc kubenswrapper[4879]: I1125 14:50:26.428705 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerStarted","Data":"3f48970c185dfaf1d95c8ae3db2de1e8b88ce02bf6cadd6e87e1ea0d966ae78b"}
Nov 25 14:50:26 crc kubenswrapper[4879]: I1125 14:50:26.428730 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6c8985d949-st5vd"
Nov 25 14:50:26 crc kubenswrapper[4879]: I1125 14:50:26.453097 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6c8985d949-st5vd" podStartSLOduration=2.453059365 podStartE2EDuration="2.453059365s" podCreationTimestamp="2025-11-25 14:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:26.447473431 +0000 UTC m=+1518.050886512" watchObservedRunningTime="2025-11-25 14:50:26.453059365 +0000 UTC m=+1518.056472436"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.450238 4879 generic.go:334] "Generic (PLEG): container finished" podID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerID="4b3e2f1a92c3ec318d8860400eed136cf021710c1bf69b80c09ebe60b3bd9674" exitCode=0
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.450325 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerDied","Data":"4b3e2f1a92c3ec318d8860400eed136cf021710c1bf69b80c09ebe60b3bd9674"}
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.694622 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cws7f"]
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.739031 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cws7f"]
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.739208 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.891747 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7kdzs"]
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.902659 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.904152 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.913441 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.913876 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbvmj\" (UniqueName: \"kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.908690 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7kdzs"]
Nov 25 14:50:28 crc kubenswrapper[4879]: I1125 14:50:28.921897 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.015508 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.015804 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.015896 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.015986 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016108 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljwhs\" (UniqueName: \"kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016231 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016338 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml\") pod \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\" (UID: \"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1\") "
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016169 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016709 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng8mx\" (UniqueName: \"kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016797 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbvmj\" (UniqueName: \"kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016904 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016985 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.017067 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.017262 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.017385 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.017506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.016796 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.017869 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.024889 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs" (OuterVolumeSpecName: "kube-api-access-ljwhs") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "kube-api-access-ljwhs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.031298 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts" (OuterVolumeSpecName: "scripts") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.038746 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbvmj\" (UniqueName: \"kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj\") pod \"certified-operators-cws7f\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.065944 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.071517 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.111225 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.120496 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng8mx\" (UniqueName: \"kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.120802 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.121856 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.121964 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.121980 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.121996 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.122007 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljwhs\" (UniqueName: \"kubernetes.io/projected/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-kube-api-access-ljwhs\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.122019 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.122486 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.123192 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.145161 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng8mx\" (UniqueName: \"kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx\") pod \"community-operators-7kdzs\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.151490 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data" (OuterVolumeSpecName: "config-data") pod "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" (UID: "49f50af1-0ea3-44a7-8cd9-a22a4009f6a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.228414 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.243776 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7kdzs"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.470541 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"49f50af1-0ea3-44a7-8cd9-a22a4009f6a1","Type":"ContainerDied","Data":"a462db2127462daefe35b313babef30703afdeca15eeaf43aa4343cf28b198dc"}
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.470606 4879 scope.go:117] "RemoveContainer" containerID="6ccf158ca17dbc7206f394a97a3f6e39c2fdb7c9caace41a3e5e0fd60578c244"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.470799 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.526359 4879 scope.go:117] "RemoveContainer" containerID="e71cfe4c8214f91984fbd214b7b60fe3f8777249c064f99b0f5735128c0af738"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.539293 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.564188 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.576198 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:29 crc kubenswrapper[4879]: E1125 14:50:29.583562 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-central-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583603 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-central-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: E1125 14:50:29.583631 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="proxy-httpd"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583656 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="proxy-httpd"
Nov 25 14:50:29 crc kubenswrapper[4879]: E1125 14:50:29.583674 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="sg-core"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583684 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="sg-core"
Nov 25 14:50:29 crc kubenswrapper[4879]: E1125 14:50:29.583714 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-notification-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583720 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-notification-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583896 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="sg-core"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583913 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-central-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583925 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="proxy-httpd"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.583932 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" containerName="ceilometer-notification-agent"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.585437 4879 scope.go:117] "RemoveContainer" containerID="4b3e2f1a92c3ec318d8860400eed136cf021710c1bf69b80c09ebe60b3bd9674"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.585505 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.593506 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.593744 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.599636 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.648297 4879 scope.go:117] "RemoveContainer" containerID="963285dbb567e3fb50840338ce65066a0f57286a081eee1e4fde35ff6d3a7ae7"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.675925 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49f50af1-0ea3-44a7-8cd9-a22a4009f6a1" path="/var/lib/kubelet/pods/49f50af1-0ea3-44a7-8cd9-a22a4009f6a1/volumes"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.676892 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cws7f"]
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741352 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741456 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741493 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741597 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741644 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741683 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2kfg\" (UniqueName: \"kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.741743 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.775169 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7kdzs"]
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.843423 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.844260 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.844514 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.844628 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.844904 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.844985 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.845076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2kfg\" (UniqueName: \"kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.845230 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.849334 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.851548 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.852477 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.868130 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.870551 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.871946 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2kfg\" (UniqueName: \"kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg\") pod \"ceilometer-0\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " pod="openstack/ceilometer-0"
Nov 25 14:50:29 crc kubenswrapper[4879]: I1125 14:50:29.918611 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 25 14:50:30 crc kubenswrapper[4879]: I1125 14:50:30.502204 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerStarted","Data":"f61807d800a523eb60ef669ed0a0ee6c27196ac01b650534b03debacb3929e60"}
Nov 25 14:50:30 crc kubenswrapper[4879]: I1125 14:50:30.508271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerStarted","Data":"eb6fa7c645d3a05c1cf1893b5be33425c236e85cd844d3e70a613d932caf3418"}
Nov 25 14:50:30 crc kubenswrapper[4879]: I1125 14:50:30.549804 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.518567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerStarted","Data":"af5d6de484b09fbb011bf123b39f5ab7a555ff8a6296fc9f246ee4015cf139e2"}
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.526650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerStarted","Data":"e1a0ed94e274e01c0a03589235b6fc884ecd3d5b6fab6cb2dd4d3bfb1dc4c2d6"}
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.658675 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"]
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.660786 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.694004 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"]
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.697670 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-vf4mk"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.791216 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.791375 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.791539 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb4fc\" (UniqueName: \"kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.893401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.893502 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.893598 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb4fc\" (UniqueName: \"kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.894069 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.894441 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.921323 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb4fc\" (UniqueName: \"kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc\") pod \"redhat-marketplace-n2454\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " pod="openshift-marketplace/redhat-marketplace-n2454"
Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.977711 4879 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:50:31 crc kubenswrapper[4879]: I1125 14:50:31.997540 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-m7d57" Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.500749 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"] Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.556144 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerStarted","Data":"a95b0ea63752169fed893aea3ba92459e9dd4505cce99e63a0257b5d3f6c8da7"} Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.568496 4879 generic.go:334] "Generic (PLEG): container finished" podID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerID="467cda5a92e4a9568c185fb7b0dd1d0604375fe7911694bf33b41a4495f33f7c" exitCode=0 Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.568840 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerDied","Data":"467cda5a92e4a9568c185fb7b0dd1d0604375fe7911694bf33b41a4495f33f7c"} Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.573969 4879 generic.go:334] "Generic (PLEG): container finished" podID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerID="e1a0ed94e274e01c0a03589235b6fc884ecd3d5b6fab6cb2dd4d3bfb1dc4c2d6" exitCode=0 Nov 25 14:50:32 crc kubenswrapper[4879]: I1125 14:50:32.574001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerDied","Data":"e1a0ed94e274e01c0a03589235b6fc884ecd3d5b6fab6cb2dd4d3bfb1dc4c2d6"} Nov 25 14:50:33 crc kubenswrapper[4879]: I1125 14:50:33.588738 4879 generic.go:334] "Generic (PLEG): container finished" podID="bb15aafc-57e7-4069-be26-b70743c103e3" containerID="151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3" exitCode=0 Nov 25 14:50:33 crc kubenswrapper[4879]: I1125 14:50:33.588861 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerDied","Data":"151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3"} Nov 25 14:50:34 crc kubenswrapper[4879]: I1125 14:50:34.817695 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6c8985d949-st5vd" Nov 25 14:50:34 crc kubenswrapper[4879]: I1125 14:50:34.818348 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6c8985d949-st5vd" Nov 25 14:50:35 crc kubenswrapper[4879]: I1125 14:50:35.610762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerStarted","Data":"e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3"} Nov 25 14:50:37 crc kubenswrapper[4879]: I1125 14:50:37.647435 4879 generic.go:334] "Generic (PLEG): container finished" podID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerID="12cb4bc8da4db9491df4437ab2a20d80c67926b82bd1b5a731c8081c2c5dfefc" exitCode=0 Nov 25 14:50:37 crc kubenswrapper[4879]: I1125 14:50:37.704396 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerDied","Data":"12cb4bc8da4db9491df4437ab2a20d80c67926b82bd1b5a731c8081c2c5dfefc"} Nov 25 14:50:37 crc kubenswrapper[4879]: I1125 14:50:37.704441 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerStarted","Data":"489841467fceac4c06449f6b44bb905a9281d72e2fa5d56769ca6ab00da80995"} Nov 25 14:50:40 crc kubenswrapper[4879]: I1125 14:50:40.685537 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerStarted","Data":"68b67372f1b83df26bf2e310bb04308ab0f841ffbb6e77c1cdb91b15e8d05f37"} Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.312266 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-lgf9c"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.313862 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.323326 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lgf9c"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.413443 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-4svkl"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.420346 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.462091 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.462170 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9fls\" (UniqueName: \"kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.481225 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-4svkl"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.528214 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pgffl"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.529547 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.547216 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-40fc-account-create-vzsl2"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.550865 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.554155 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.562552 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-40fc-account-create-vzsl2"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.563308 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.563378 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh89k\" (UniqueName: \"kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.563409 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.563448 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9fls\" (UniqueName: \"kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.564543 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.587180 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pgffl"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.595031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9fls\" (UniqueName: \"kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls\") pod \"nova-api-db-create-lgf9c\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665364 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665437 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts\") pod \"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665498 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-452qv\" (UniqueName: \"kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665538 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665572 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m4b7\" (UniqueName: \"kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7\") pod \"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.665610 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh89k\" (UniqueName: \"kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.666885 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.672737 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.696506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh89k\" (UniqueName: \"kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k\") pod \"nova-cell0-db-create-4svkl\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.735049 4879 generic.go:334] "Generic (PLEG): container finished" podID="bb15aafc-57e7-4069-be26-b70743c103e3" containerID="d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d" exitCode=0 Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.735141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerDied","Data":"d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d"} Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.747030 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-c4f1-account-create-f8ntq"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.748293 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.751274 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.755967 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.756174 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerStarted","Data":"9343436781b61bd3ebd9153984239f2fc0376be9027802ac40c4803a93fa4db4"} Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.768977 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.769080 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts\") pod \"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.769186 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-452qv\" (UniqueName: \"kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.769249 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m4b7\" (UniqueName: \"kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7\") pod 
\"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.770781 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.772101 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts\") pod \"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.805943 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-452qv\" (UniqueName: \"kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv\") pod \"nova-cell1-db-create-pgffl\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.806069 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-c4f1-account-create-f8ntq"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.809608 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m4b7\" (UniqueName: \"kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7\") pod \"nova-api-40fc-account-create-vzsl2\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.849744 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cws7f" podStartSLOduration=6.744877138 podStartE2EDuration="13.849723752s" podCreationTimestamp="2025-11-25 14:50:28 +0000 UTC" firstStartedPulling="2025-11-25 14:50:32.57200671 +0000 UTC m=+1524.175419781" lastFinishedPulling="2025-11-25 14:50:39.676853324 +0000 UTC m=+1531.280266395" observedRunningTime="2025-11-25 14:50:41.832135237 +0000 UTC m=+1533.435548298" watchObservedRunningTime="2025-11-25 14:50:41.849723752 +0000 UTC m=+1533.453136813" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.871481 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh7nd\" (UniqueName: \"kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.871614 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.871638 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.882258 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.931862 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-c293-account-create-nmhlw"] Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.933057 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.937341 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.973386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdbf6\" (UniqueName: \"kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.973521 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.973562 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.973668 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh7nd\" (UniqueName: \"kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.974936 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:41 crc kubenswrapper[4879]: I1125 14:50:41.977206 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c293-account-create-nmhlw"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.005971 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh7nd\" (UniqueName: \"kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd\") pod \"nova-cell0-c4f1-account-create-f8ntq\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.075268 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.075447 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdbf6\" (UniqueName: \"kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.076923 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.094191 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdbf6\" (UniqueName: \"kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6\") pod \"nova-cell1-c293-account-create-nmhlw\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.242428 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.261650 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.264465 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.270583 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.273203 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.273476 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.273627 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.273845 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-nxcpq" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.288012 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382400 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382424 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zmz4\" (UniqueName: \"kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382507 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.382581 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.484027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " 
pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.484079 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.484101 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.484141 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zmz4\" (UniqueName: \"kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.488572 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.488682 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.490238 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.493220 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.503538 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.504828 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.526183 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zmz4\" (UniqueName: 
\"kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.555215 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.565184 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-4svkl"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.582040 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lgf9c"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.600546 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.624967 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7965d46465-b9w8p" Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.699236 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pgffl"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.728039 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-40fc-account-create-vzsl2"] Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.836529 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pgffl" event={"ID":"0199070f-75cd-4d1b-975f-e3b655a975d1","Type":"ContainerStarted","Data":"e33bae2d9980d0e8cbf223c377450e5a356d9dbfd591df6582e9cb2b6183cf52"} Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.838039 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4svkl" event={"ID":"512a9f5d-6203-4731-b139-e3698f82f8ca","Type":"ContainerStarted","Data":"03f4d444b84d1b2d63958f42d81cc80fb8e0143c89d8a8b6e30e3fefd86b1aa5"} Nov 25 14:50:42 crc kubenswrapper[4879]: I1125 14:50:42.845476 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lgf9c" event={"ID":"4d7d4dbb-8a59-4b25-a339-9c581c375b4a","Type":"ContainerStarted","Data":"042c2f81846b6e166b32f4b89e687f9ff911971b39dd5c959b0e39958050983d"} Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.269084 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-c293-account-create-nmhlw"] Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.303842 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-c4f1-account-create-f8ntq"] Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.552569 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.863048 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lgf9c" event={"ID":"4d7d4dbb-8a59-4b25-a339-9c581c375b4a","Type":"ContainerStarted","Data":"93a7daf489e066419642d541945b2893cce9e5e066db82129cc2cfab9c02e43d"} Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.866158 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-40fc-account-create-vzsl2" 
event={"ID":"43bad898-a521-4f6d-b02f-f6ced31ae960","Type":"ContainerStarted","Data":"84b4aee8acff5347fa2d5311d9fe19d94cb19d7939e1bf36096045301571068a"} Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.868754 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" event={"ID":"1753af15-90b5-420c-bd77-86c117e8dd51","Type":"ContainerStarted","Data":"4521a2aa665654639988c3164c102b80249ed51042ed5039e136301a6d21ed32"} Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.877220 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c293-account-create-nmhlw" event={"ID":"d83638d7-7015-4b89-b959-13bd8c563ad4","Type":"ContainerStarted","Data":"0884f6f6e1f0351e195d9aa4e59c76e5f01795f637baec5fa4f5ec0ce2605b1b"} Nov 25 14:50:43 crc kubenswrapper[4879]: I1125 14:50:43.887605 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4svkl" event={"ID":"512a9f5d-6203-4731-b139-e3698f82f8ca","Type":"ContainerStarted","Data":"37f1273f7cf5c502d0c77a51a61d32202ae16d213bc42b5c8e8e7ee5c19bf9d5"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.899367 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerStarted","Data":"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.901766 4879 generic.go:334] "Generic (PLEG): container finished" podID="0199070f-75cd-4d1b-975f-e3b655a975d1" containerID="a457e76a3c320a65a07c2c4f3d622b70550393c726b7091f59077e4f3566579e" exitCode=0 Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.901861 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pgffl" event={"ID":"0199070f-75cd-4d1b-975f-e3b655a975d1","Type":"ContainerDied","Data":"a457e76a3c320a65a07c2c4f3d622b70550393c726b7091f59077e4f3566579e"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.903874 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" event={"ID":"1753af15-90b5-420c-bd77-86c117e8dd51","Type":"ContainerStarted","Data":"266a6f13e0258d2c83de2f1cada5a19bdc337665505445a2038543adf1aa1655"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.905187 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerStarted","Data":"eb8ac9df4846e030cc4f99022307ea1ef3e9b61bde8b6441e728a6b636224a8d"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.907055 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c293-account-create-nmhlw" event={"ID":"d83638d7-7015-4b89-b959-13bd8c563ad4","Type":"ContainerStarted","Data":"7d080f8359f9fea26f6bc4835f5bfefbc20ad3438d3ba6af33f49da9ea90d11f"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.909542 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-40fc-account-create-vzsl2" event={"ID":"43bad898-a521-4f6d-b02f-f6ced31ae960","Type":"ContainerStarted","Data":"fe799137727ba1eb4807a4cc49ddff7756508ae8f1751901fa9dc73f1198b961"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.912787 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerStarted","Data":"e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf"} Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.940459 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-c293-account-create-nmhlw" podStartSLOduration=3.940433655 podStartE2EDuration="3.940433655s" podCreationTimestamp="2025-11-25 14:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:44.928744023 +0000 UTC m=+1536.532157094" watchObservedRunningTime="2025-11-25 14:50:44.940433655 +0000 UTC m=+1536.543846726" Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.952420 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" podStartSLOduration=3.9523937350000002 podStartE2EDuration="3.952393735s" podCreationTimestamp="2025-11-25 14:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:44.941362801 +0000 UTC m=+1536.544775872" watchObservedRunningTime="2025-11-25 14:50:44.952393735 +0000 UTC m=+1536.555806806" Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.960499 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-4svkl" podStartSLOduration=3.960477968 podStartE2EDuration="3.960477968s" podCreationTimestamp="2025-11-25 14:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:44.95586717 +0000 UTC m=+1536.559280261" watchObservedRunningTime="2025-11-25 14:50:44.960477968 +0000 UTC m=+1536.563891039" Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.978513 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-lgf9c" podStartSLOduration=3.978494445 podStartE2EDuration="3.978494445s" podCreationTimestamp="2025-11-25 14:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:44.969383333 +0000 UTC m=+1536.572796404" watchObservedRunningTime="2025-11-25 14:50:44.978494445 +0000 UTC m=+1536.581907526" Nov 25 14:50:44 crc kubenswrapper[4879]: I1125 14:50:44.997365 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-40fc-account-create-vzsl2" podStartSLOduration=3.997338173 podStartE2EDuration="3.997338173s" podCreationTimestamp="2025-11-25 14:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:44.988304275 +0000 UTC m=+1536.591717336" watchObservedRunningTime="2025-11-25 14:50:44.997338173 +0000 UTC m=+1536.600751244" Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.928643 4879 generic.go:334] "Generic (PLEG): container finished" podID="d83638d7-7015-4b89-b959-13bd8c563ad4" containerID="7d080f8359f9fea26f6bc4835f5bfefbc20ad3438d3ba6af33f49da9ea90d11f" exitCode=0 Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.928743 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c293-account-create-nmhlw" 
event={"ID":"d83638d7-7015-4b89-b959-13bd8c563ad4","Type":"ContainerDied","Data":"7d080f8359f9fea26f6bc4835f5bfefbc20ad3438d3ba6af33f49da9ea90d11f"} Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.932239 4879 generic.go:334] "Generic (PLEG): container finished" podID="512a9f5d-6203-4731-b139-e3698f82f8ca" containerID="37f1273f7cf5c502d0c77a51a61d32202ae16d213bc42b5c8e8e7ee5c19bf9d5" exitCode=0 Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.932292 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4svkl" event={"ID":"512a9f5d-6203-4731-b139-e3698f82f8ca","Type":"ContainerDied","Data":"37f1273f7cf5c502d0c77a51a61d32202ae16d213bc42b5c8e8e7ee5c19bf9d5"} Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.935553 4879 generic.go:334] "Generic (PLEG): container finished" podID="4d7d4dbb-8a59-4b25-a339-9c581c375b4a" containerID="93a7daf489e066419642d541945b2893cce9e5e066db82129cc2cfab9c02e43d" exitCode=0 Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.935630 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lgf9c" event={"ID":"4d7d4dbb-8a59-4b25-a339-9c581c375b4a","Type":"ContainerDied","Data":"93a7daf489e066419642d541945b2893cce9e5e066db82129cc2cfab9c02e43d"} Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.937600 4879 generic.go:334] "Generic (PLEG): container finished" podID="43bad898-a521-4f6d-b02f-f6ced31ae960" containerID="fe799137727ba1eb4807a4cc49ddff7756508ae8f1751901fa9dc73f1198b961" exitCode=0 Nov 25 14:50:45 crc kubenswrapper[4879]: I1125 14:50:45.937643 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-40fc-account-create-vzsl2" event={"ID":"43bad898-a521-4f6d-b02f-f6ced31ae960","Type":"ContainerDied","Data":"fe799137727ba1eb4807a4cc49ddff7756508ae8f1751901fa9dc73f1198b961"} Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.026614 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n2454" podStartSLOduration=4.6756264 podStartE2EDuration="15.026597118s" podCreationTimestamp="2025-11-25 14:50:31 +0000 UTC" firstStartedPulling="2025-11-25 14:50:34.197270833 +0000 UTC m=+1525.800683904" lastFinishedPulling="2025-11-25 14:50:44.548241551 +0000 UTC m=+1536.151654622" observedRunningTime="2025-11-25 14:50:46.02525085 +0000 UTC m=+1537.628663921" watchObservedRunningTime="2025-11-25 14:50:46.026597118 +0000 UTC m=+1537.630010189" Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.110059 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.110311 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-log" containerID="cri-o://1e6667397c097692100efe4ff452f53f8325243348eea48e838c523168da37a7" gracePeriod=30 Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.110452 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-httpd" containerID="cri-o://7577298483f3d806f50edb174d215875489985db1478c5c365e6375cc6cc5ed0" gracePeriod=30 Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.950919 4879 generic.go:334] "Generic (PLEG): container finished" podID="dcc96323-05d7-4a47-9f3d-422508fcabcc" 
containerID="1e6667397c097692100efe4ff452f53f8325243348eea48e838c523168da37a7" exitCode=143 Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.951168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerDied","Data":"1e6667397c097692100efe4ff452f53f8325243348eea48e838c523168da37a7"} Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.953384 4879 generic.go:334] "Generic (PLEG): container finished" podID="1753af15-90b5-420c-bd77-86c117e8dd51" containerID="266a6f13e0258d2c83de2f1cada5a19bdc337665505445a2038543adf1aa1655" exitCode=0 Nov 25 14:50:46 crc kubenswrapper[4879]: I1125 14:50:46.953597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" event={"ID":"1753af15-90b5-420c-bd77-86c117e8dd51","Type":"ContainerDied","Data":"266a6f13e0258d2c83de2f1cada5a19bdc337665505445a2038543adf1aa1655"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.208784 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.210596 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.210838 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-log" containerID="cri-o://da67a389b74431ca72138a70f07b00255a1ee909f216b0cbf4c75bb7fc5b974d" gracePeriod=30 Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.210976 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-httpd" containerID="cri-o://75fffef0ceaa93e38bd59fcdeb4600d29283f324dbf62e1fe78cd408a68c4581" gracePeriod=30 Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.322654 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-452qv\" (UniqueName: \"kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv\") pod \"0199070f-75cd-4d1b-975f-e3b655a975d1\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.322713 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts\") pod \"0199070f-75cd-4d1b-975f-e3b655a975d1\" (UID: \"0199070f-75cd-4d1b-975f-e3b655a975d1\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.323805 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0199070f-75cd-4d1b-975f-e3b655a975d1" (UID: "0199070f-75cd-4d1b-975f-e3b655a975d1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.324609 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0199070f-75cd-4d1b-975f-e3b655a975d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.339044 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv" (OuterVolumeSpecName: "kube-api-access-452qv") pod "0199070f-75cd-4d1b-975f-e3b655a975d1" (UID: "0199070f-75cd-4d1b-975f-e3b655a975d1"). InnerVolumeSpecName "kube-api-access-452qv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.429578 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-452qv\" (UniqueName: \"kubernetes.io/projected/0199070f-75cd-4d1b-975f-e3b655a975d1-kube-api-access-452qv\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.501173 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.657678 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9fls\" (UniqueName: \"kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls\") pod \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.657755 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts\") pod \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\" (UID: \"4d7d4dbb-8a59-4b25-a339-9c581c375b4a\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.661396 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4d7d4dbb-8a59-4b25-a339-9c581c375b4a" (UID: "4d7d4dbb-8a59-4b25-a339-9c581c375b4a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.668666 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls" (OuterVolumeSpecName: "kube-api-access-q9fls") pod "4d7d4dbb-8a59-4b25-a339-9c581c375b4a" (UID: "4d7d4dbb-8a59-4b25-a339-9c581c375b4a"). InnerVolumeSpecName "kube-api-access-q9fls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.707110 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.713170 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.763019 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9fls\" (UniqueName: \"kubernetes.io/projected/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-kube-api-access-q9fls\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.763065 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d7d4dbb-8a59-4b25-a339-9c581c375b4a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.799480 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.864856 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh89k\" (UniqueName: \"kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k\") pod \"512a9f5d-6203-4731-b139-e3698f82f8ca\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.864909 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts\") pod \"512a9f5d-6203-4731-b139-e3698f82f8ca\" (UID: \"512a9f5d-6203-4731-b139-e3698f82f8ca\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.864935 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts\") pod \"d83638d7-7015-4b89-b959-13bd8c563ad4\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865158 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m4b7\" (UniqueName: \"kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7\") pod \"43bad898-a521-4f6d-b02f-f6ced31ae960\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865213 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts\") pod \"43bad898-a521-4f6d-b02f-f6ced31ae960\" (UID: \"43bad898-a521-4f6d-b02f-f6ced31ae960\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865263 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdbf6\" (UniqueName: \"kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6\") pod \"d83638d7-7015-4b89-b959-13bd8c563ad4\" (UID: \"d83638d7-7015-4b89-b959-13bd8c563ad4\") " Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865476 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "512a9f5d-6203-4731-b139-e3698f82f8ca" (UID: "512a9f5d-6203-4731-b139-e3698f82f8ca"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865553 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d83638d7-7015-4b89-b959-13bd8c563ad4" (UID: "d83638d7-7015-4b89-b959-13bd8c563ad4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865814 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/512a9f5d-6203-4731-b139-e3698f82f8ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865843 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d83638d7-7015-4b89-b959-13bd8c563ad4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.865903 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43bad898-a521-4f6d-b02f-f6ced31ae960" (UID: "43bad898-a521-4f6d-b02f-f6ced31ae960"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.868209 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k" (OuterVolumeSpecName: "kube-api-access-nh89k") pod "512a9f5d-6203-4731-b139-e3698f82f8ca" (UID: "512a9f5d-6203-4731-b139-e3698f82f8ca"). InnerVolumeSpecName "kube-api-access-nh89k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.869152 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6" (OuterVolumeSpecName: "kube-api-access-mdbf6") pod "d83638d7-7015-4b89-b959-13bd8c563ad4" (UID: "d83638d7-7015-4b89-b959-13bd8c563ad4"). InnerVolumeSpecName "kube-api-access-mdbf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.869259 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7" (OuterVolumeSpecName: "kube-api-access-2m4b7") pod "43bad898-a521-4f6d-b02f-f6ced31ae960" (UID: "43bad898-a521-4f6d-b02f-f6ced31ae960"). InnerVolumeSpecName "kube-api-access-2m4b7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.965630 4879 generic.go:334] "Generic (PLEG): container finished" podID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerID="da67a389b74431ca72138a70f07b00255a1ee909f216b0cbf4c75bb7fc5b974d" exitCode=143 Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.965670 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerDied","Data":"da67a389b74431ca72138a70f07b00255a1ee909f216b0cbf4c75bb7fc5b974d"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.967581 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdbf6\" (UniqueName: \"kubernetes.io/projected/d83638d7-7015-4b89-b959-13bd8c563ad4-kube-api-access-mdbf6\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.967611 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh89k\" (UniqueName: \"kubernetes.io/projected/512a9f5d-6203-4731-b139-e3698f82f8ca-kube-api-access-nh89k\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.967621 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m4b7\" (UniqueName: \"kubernetes.io/projected/43bad898-a521-4f6d-b02f-f6ced31ae960-kube-api-access-2m4b7\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.967634 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43bad898-a521-4f6d-b02f-f6ced31ae960-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.969706 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-c293-account-create-nmhlw" event={"ID":"d83638d7-7015-4b89-b959-13bd8c563ad4","Type":"ContainerDied","Data":"0884f6f6e1f0351e195d9aa4e59c76e5f01795f637baec5fa4f5ec0ce2605b1b"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.969763 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0884f6f6e1f0351e195d9aa4e59c76e5f01795f637baec5fa4f5ec0ce2605b1b" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.969735 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-c293-account-create-nmhlw" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.971913 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-4svkl" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.971904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-4svkl" event={"ID":"512a9f5d-6203-4731-b139-e3698f82f8ca","Type":"ContainerDied","Data":"03f4d444b84d1b2d63958f42d81cc80fb8e0143c89d8a8b6e30e3fefd86b1aa5"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.972048 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03f4d444b84d1b2d63958f42d81cc80fb8e0143c89d8a8b6e30e3fefd86b1aa5" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.973783 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lgf9c" event={"ID":"4d7d4dbb-8a59-4b25-a339-9c581c375b4a","Type":"ContainerDied","Data":"042c2f81846b6e166b32f4b89e687f9ff911971b39dd5c959b0e39958050983d"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.973823 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="042c2f81846b6e166b32f4b89e687f9ff911971b39dd5c959b0e39958050983d" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.973800 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lgf9c" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.975366 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-40fc-account-create-vzsl2" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.975381 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-40fc-account-create-vzsl2" event={"ID":"43bad898-a521-4f6d-b02f-f6ced31ae960","Type":"ContainerDied","Data":"84b4aee8acff5347fa2d5311d9fe19d94cb19d7939e1bf36096045301571068a"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.975416 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b4aee8acff5347fa2d5311d9fe19d94cb19d7939e1bf36096045301571068a" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.977886 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pgffl" Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.978330 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pgffl" event={"ID":"0199070f-75cd-4d1b-975f-e3b655a975d1","Type":"ContainerDied","Data":"e33bae2d9980d0e8cbf223c377450e5a356d9dbfd591df6582e9cb2b6183cf52"} Nov 25 14:50:47 crc kubenswrapper[4879]: I1125 14:50:47.978388 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e33bae2d9980d0e8cbf223c377450e5a356d9dbfd591df6582e9cb2b6183cf52" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.320862 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.386863 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xh7nd\" (UniqueName: \"kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd\") pod \"1753af15-90b5-420c-bd77-86c117e8dd51\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.386938 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts\") pod \"1753af15-90b5-420c-bd77-86c117e8dd51\" (UID: \"1753af15-90b5-420c-bd77-86c117e8dd51\") " Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.391240 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1753af15-90b5-420c-bd77-86c117e8dd51" (UID: "1753af15-90b5-420c-bd77-86c117e8dd51"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.393387 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd" (OuterVolumeSpecName: "kube-api-access-xh7nd") pod "1753af15-90b5-420c-bd77-86c117e8dd51" (UID: "1753af15-90b5-420c-bd77-86c117e8dd51"). InnerVolumeSpecName "kube-api-access-xh7nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.488871 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xh7nd\" (UniqueName: \"kubernetes.io/projected/1753af15-90b5-420c-bd77-86c117e8dd51-kube-api-access-xh7nd\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.488931 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1753af15-90b5-420c-bd77-86c117e8dd51-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.988780 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" event={"ID":"1753af15-90b5-420c-bd77-86c117e8dd51","Type":"ContainerDied","Data":"4521a2aa665654639988c3164c102b80249ed51042ed5039e136301a6d21ed32"} Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.988820 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4521a2aa665654639988c3164c102b80249ed51042ed5039e136301a6d21ed32" Nov 25 14:50:48 crc kubenswrapper[4879]: I1125 14:50:48.988881 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-c4f1-account-create-f8ntq" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.071948 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cws7f" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.072726 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cws7f" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523115 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523642 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0199070f-75cd-4d1b-975f-e3b655a975d1" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523668 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0199070f-75cd-4d1b-975f-e3b655a975d1" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523688 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43bad898-a521-4f6d-b02f-f6ced31ae960" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523696 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="43bad898-a521-4f6d-b02f-f6ced31ae960" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523730 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d7d4dbb-8a59-4b25-a339-9c581c375b4a" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523739 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d7d4dbb-8a59-4b25-a339-9c581c375b4a" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523752 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1753af15-90b5-420c-bd77-86c117e8dd51" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523759 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1753af15-90b5-420c-bd77-86c117e8dd51" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523771 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="512a9f5d-6203-4731-b139-e3698f82f8ca" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523778 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="512a9f5d-6203-4731-b139-e3698f82f8ca" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: E1125 14:50:49.523797 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83638d7-7015-4b89-b959-13bd8c563ad4" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523804 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83638d7-7015-4b89-b959-13bd8c563ad4" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.523999 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d83638d7-7015-4b89-b959-13bd8c563ad4" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.524025 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1753af15-90b5-420c-bd77-86c117e8dd51" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: 
I1125 14:50:49.524040 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0199070f-75cd-4d1b-975f-e3b655a975d1" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.524053 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d7d4dbb-8a59-4b25-a339-9c581c375b4a" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.524067 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="43bad898-a521-4f6d-b02f-f6ced31ae960" containerName="mariadb-account-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.524077 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="512a9f5d-6203-4731-b139-e3698f82f8ca" containerName="mariadb-database-create" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.525572 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.547094 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615163 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615541 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615575 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615630 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615670 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2gf5\" (UniqueName: \"kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.615727 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " 
pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.717462 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2gf5\" (UniqueName: \"kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.717917 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.718373 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.718507 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.718621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.718778 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.722937 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.723306 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.723697 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.724597 
4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.728139 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.746064 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.746913 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2gf5\" (UniqueName: \"kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5\") pod \"dnsmasq-dns-5d6bd97c5-5ndpb\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.749561 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.826611 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.828891 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.840215 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.841782 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.858254 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.858362 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.858540 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-9fqwh" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.858589 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.887670 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921558 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921612 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921646 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fkzv\" (UniqueName: \"kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921707 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921749 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921776 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921817 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921845 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5fkw\" (UniqueName: \"kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921881 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921917 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.921956 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.924108 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.968303 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.970189 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.995624 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.996413 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 25 14:50:49 crc kubenswrapper[4879]: I1125 14:50:49.996595 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.027867 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.027927 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.027963 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.027989 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028020 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5fkw\" (UniqueName: \"kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028093 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028165 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh6mv\" (UniqueName: \"kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") 
" pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028189 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028213 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028266 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028298 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028329 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028362 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028389 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028451 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fkzv\" (UniqueName: \"kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028536 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028569 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.028596 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.029546 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.032866 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.033902 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.038067 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.042757 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.042803 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.048084 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.048475 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.048937 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.057237 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.066483 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5fkw\" (UniqueName: \"kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw\") pod \"dnsmasq-dns-774db89647-jg5tc\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.074895 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fkzv\" (UniqueName: \"kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv\") pod \"neutron-5fd554d876-nlf5m\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.095076 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerStarted","Data":"305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2"} Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.095455 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.143642 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.143834 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.144037 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.144076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.144144 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.144297 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.173601 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh6mv\" (UniqueName: \"kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.173739 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.173790 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.251238 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.267741 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.268045 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.270167 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.270712 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" 
containerName="registry-server" probeResult="failure" output=< Nov 25 14:50:50 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:50:50 crc kubenswrapper[4879]: > Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.284021 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.285735 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerDied","Data":"489841467fceac4c06449f6b44bb905a9281d72e2fa5d56769ca6ab00da80995"} Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.287015 4879 generic.go:334] "Generic (PLEG): container finished" podID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerID="489841467fceac4c06449f6b44bb905a9281d72e2fa5d56769ca6ab00da80995" exitCode=0 Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.291754 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.314604 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.316085 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.327448 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.347382 4879 generic.go:334] "Generic (PLEG): container finished" podID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerID="7577298483f3d806f50edb174d215875489985db1478c5c365e6375cc6cc5ed0" exitCode=0 Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.347452 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerDied","Data":"7577298483f3d806f50edb174d215875489985db1478c5c365e6375cc6cc5ed0"} Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.348781 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.666958847 podStartE2EDuration="21.348767965s" podCreationTimestamp="2025-11-25 14:50:29 +0000 UTC" firstStartedPulling="2025-11-25 14:50:30.559640782 +0000 UTC m=+1522.163053853" lastFinishedPulling="2025-11-25 14:50:49.2414499 +0000 UTC m=+1540.844862971" observedRunningTime="2025-11-25 14:50:50.26763005 +0000 UTC m=+1541.871043141" watchObservedRunningTime="2025-11-25 14:50:50.348767965 +0000 UTC m=+1541.952181036" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.357588 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.361020 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh6mv\" (UniqueName: \"kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv\") pod \"cinder-api-0\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.394567 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.742153 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.894611 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930458 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930792 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtqmr\" (UniqueName: \"kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930819 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930854 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930919 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930963 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.930980 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.931002 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data\") pod \"dcc96323-05d7-4a47-9f3d-422508fcabcc\" (UID: \"dcc96323-05d7-4a47-9f3d-422508fcabcc\") " Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.937051 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.939269 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts" (OuterVolumeSpecName: "scripts") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.939586 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs" (OuterVolumeSpecName: "logs") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.945921 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr" (OuterVolumeSpecName: "kube-api-access-vtqmr") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "kube-api-access-vtqmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:50 crc kubenswrapper[4879]: I1125 14:50:50.960524 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.003010 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.006856 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data" (OuterVolumeSpecName: "config-data") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034207 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034239 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034249 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034259 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtqmr\" (UniqueName: \"kubernetes.io/projected/dcc96323-05d7-4a47-9f3d-422508fcabcc-kube-api-access-vtqmr\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034286 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034295 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc96323-05d7-4a47-9f3d-422508fcabcc-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.034303 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.069205 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dcc96323-05d7-4a47-9f3d-422508fcabcc" (UID: "dcc96323-05d7-4a47-9f3d-422508fcabcc"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.092054 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.135784 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcc96323-05d7-4a47-9f3d-422508fcabcc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.135823 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.170693 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.200245 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.368424 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerStarted","Data":"76cb900db533acd1b9a1a1817de0b202c3c2b3883a680ae32a7f092cd5122448"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.381798 4879 generic.go:334] "Generic (PLEG): container finished" podID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerID="75fffef0ceaa93e38bd59fcdeb4600d29283f324dbf62e1fe78cd408a68c4581" exitCode=0 Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.381895 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerDied","Data":"75fffef0ceaa93e38bd59fcdeb4600d29283f324dbf62e1fe78cd408a68c4581"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.399377 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"dcc96323-05d7-4a47-9f3d-422508fcabcc","Type":"ContainerDied","Data":"8752de461733c913b130b4f5ca752cd2a79ae1df38c85c652ff49f70679c1551"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.399429 4879 scope.go:117] "RemoveContainer" containerID="7577298483f3d806f50edb174d215875489985db1478c5c365e6375cc6cc5ed0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.399571 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.423691 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerStarted","Data":"263bb49c89503a2b102d1037dbd2611579a3288b3fcbddab165cac908df22d89"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.450737 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerStarted","Data":"c4ef735fb769d92e45c5f3a173a119bb6a11ed2a4e29298b0700403da0985e73"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.454492 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" event={"ID":"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2","Type":"ContainerStarted","Data":"f3d684182abf8976e5679c997abff682cacb25f1366ea3034dad70428eb66c1e"} Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.524215 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.610568 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.718273 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.726884 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:51 crc kubenswrapper[4879]: E1125 14:50:51.727419 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-log" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.727439 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-log" Nov 25 14:50:51 crc kubenswrapper[4879]: E1125 14:50:51.727465 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-httpd" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.727474 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-httpd" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.727725 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-httpd" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.727744 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" containerName="glance-log" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.728760 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.731963 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.732747 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.759561 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.782483 4879 scope.go:117] "RemoveContainer" containerID="1e6667397c097692100efe4ff452f53f8325243348eea48e838c523168da37a7" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910499 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtd9t\" (UniqueName: \"kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910560 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910601 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910676 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910700 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910789 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910830 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " 
pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.910876 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.978356 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.979376 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:50:51 crc kubenswrapper[4879]: I1125 14:50:51.999282 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015156 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015229 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015277 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015310 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtd9t\" (UniqueName: \"kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015346 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015375 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015438 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.015465 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.016026 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.017011 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.020167 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.028469 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.041693 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.042029 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.066977 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtd9t\" (UniqueName: \"kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.073204 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.120944 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.120986 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121035 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121069 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121188 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121285 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xntt6\" (UniqueName: \"kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121338 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.121422 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle\") pod \"91e65be6-32db-4fc4-a6d8-c2cba036263e\" (UID: \"91e65be6-32db-4fc4-a6d8-c2cba036263e\") " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.127978 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs" (OuterVolumeSpecName: "logs") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.141430 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.153510 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.163135 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6" (OuterVolumeSpecName: "kube-api-access-xntt6") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "kube-api-access-xntt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.170035 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.170992 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts" (OuterVolumeSpecName: "scripts") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.224511 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xntt6\" (UniqueName: \"kubernetes.io/projected/91e65be6-32db-4fc4-a6d8-c2cba036263e-kube-api-access-xntt6\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.224556 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.224569 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.224582 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e65be6-32db-4fc4-a6d8-c2cba036263e-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.224614 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.239256 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.336757 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.375209 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.391835 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-h2tdk"] Nov 25 14:50:52 crc kubenswrapper[4879]: E1125 14:50:52.392869 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-httpd" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.392907 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-httpd" Nov 25 14:50:52 crc kubenswrapper[4879]: E1125 14:50:52.392953 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-log" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.392959 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-log" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.393544 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-httpd" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.393558 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" containerName="glance-log" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.413347 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.418737 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.418878 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.419001 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-fdslq" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.438468 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.438777 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.463700 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-h2tdk"] Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.531605 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data" (OuterVolumeSpecName: "config-data") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.547947 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66g2w\" (UniqueName: \"kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.548022 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.548068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.548107 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.548203 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.549524 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerStarted","Data":"ace7b2b72ae1f9f7d46b15a08f7375bee4ae4be1fda480a863e85408b03d4d59"} Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.557601 4879 generic.go:334] "Generic (PLEG): container finished" podID="0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" containerID="fbe88a6cb4b6ad2f2d6694834ebb4037cf9920df6190fa12bafcde301a5a22df" exitCode=0 Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.557666 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" event={"ID":"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2","Type":"ContainerDied","Data":"fbe88a6cb4b6ad2f2d6694834ebb4037cf9920df6190fa12bafcde301a5a22df"} Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.570477 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"91e65be6-32db-4fc4-a6d8-c2cba036263e","Type":"ContainerDied","Data":"7df6e0d385dcb716d1fece062894b107c5b928ec549d6c8fdeb4047821e62057"} Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.570558 4879 scope.go:117] "RemoveContainer" containerID="75fffef0ceaa93e38bd59fcdeb4600d29283f324dbf62e1fe78cd408a68c4581" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.570830 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.603067 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "91e65be6-32db-4fc4-a6d8-c2cba036263e" (UID: "91e65be6-32db-4fc4-a6d8-c2cba036263e"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.610510 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerStarted","Data":"cd26bef6451a151cf7d6475fb5b89a3f24ea96186c01549c08a6edd44d4b67f8"} Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.640424 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerStarted","Data":"2c8d164d2af1c326ff7281b306cee67d5093d123752567a73004d578f5d96aa2"} Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.650678 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.650798 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66g2w\" (UniqueName: \"kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.650884 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.650940 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.651018 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e65be6-32db-4fc4-a6d8-c2cba036263e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.651600 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7kdzs" podStartSLOduration=6.113132144 podStartE2EDuration="24.651580083s" podCreationTimestamp="2025-11-25 14:50:28 +0000 UTC" firstStartedPulling="2025-11-25 14:50:32.578237391 +0000 UTC m=+1524.181650452" lastFinishedPulling="2025-11-25 14:50:51.11668532 +0000 UTC m=+1542.720098391" observedRunningTime="2025-11-25 14:50:52.648684093 +0000 UTC m=+1544.252097164" watchObservedRunningTime="2025-11-25 14:50:52.651580083 +0000 UTC m=+1544.254993174" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.667803 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " 
pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.692885 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.692885 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66g2w\" (UniqueName: \"kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.794874 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data\") pod \"nova-cell0-conductor-db-sync-h2tdk\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.824788 4879 scope.go:117] "RemoveContainer" containerID="da67a389b74431ca72138a70f07b00255a1ee909f216b0cbf4c75bb7fc5b974d" Nov 25 14:50:52 crc kubenswrapper[4879]: I1125 14:50:52.829327 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:52.996235 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.023249 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.050676 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.052887 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.055475 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.056676 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.061749 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.116799 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" probeResult="failure" output=< Nov 25 14:50:53 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:50:53 crc kubenswrapper[4879]: > Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185680 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185718 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185744 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185783 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185824 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tt6c\" (UniqueName: \"kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185878 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.185918 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.292975 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.293245 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294043 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294074 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tt6c\" (UniqueName: \"kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294152 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294212 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294433 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.294541 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.305434 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.305705 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.305871 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.319333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.323791 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.333263 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.333705 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.343410 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tt6c\" (UniqueName: \"kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.359785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.456910 4879 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.457278 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-central-agent" containerID="cri-o://e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3" gracePeriod=30 Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.458027 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="proxy-httpd" containerID="cri-o://305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2" gracePeriod=30 Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.458092 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="sg-core" containerID="cri-o://e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf" gracePeriod=30 Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.458161 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-notification-agent" containerID="cri-o://68b67372f1b83df26bf2e310bb04308ab0f841ffbb6e77c1cdb91b15e8d05f37" gracePeriod=30 Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.463638 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.698659 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.713263 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e65be6-32db-4fc4-a6d8-c2cba036263e" path="/var/lib/kubelet/pods/91e65be6-32db-4fc4-a6d8-c2cba036263e/volumes" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.714288 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcc96323-05d7-4a47-9f3d-422508fcabcc" path="/var/lib/kubelet/pods/dcc96323-05d7-4a47-9f3d-422508fcabcc/volumes" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.715246 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerStarted","Data":"98da8eaa2c4d2aed77c8796bba62b7f609cb8cca79843f564dc5096878ee2dda"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.715287 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-h2tdk"] Nov 25 14:50:53 crc kubenswrapper[4879]: W1125 14:50:53.739852 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc3c22fd_7723_4f81_af93_2cf3a150cd08.slice/crio-0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b WatchSource:0}: Error finding container 0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b: Status 404 returned error can't find the container with id 0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.743432 4879 generic.go:334] "Generic (PLEG): container finished" podID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerID="2c8d164d2af1c326ff7281b306cee67d5093d123752567a73004d578f5d96aa2" exitCode=0 Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.743702 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerDied","Data":"2c8d164d2af1c326ff7281b306cee67d5093d123752567a73004d578f5d96aa2"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.743793 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerStarted","Data":"17138fdd64e95fa1d901748a9f1473a1161c4a782199ea9044150a64c235aa13"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.744523 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.779501 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerStarted","Data":"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.781711 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-774db89647-jg5tc" podStartSLOduration=4.7816982150000005 podStartE2EDuration="4.781698215s" podCreationTimestamp="2025-11-25 14:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:53.779906406 +0000 UTC m=+1545.383319477" watchObservedRunningTime="2025-11-25 14:50:53.781698215 +0000 UTC m=+1545.385111286" Nov 25 14:50:53 crc 
kubenswrapper[4879]: I1125 14:50:53.818369 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerStarted","Data":"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.830500 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" event={"ID":"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2","Type":"ContainerDied","Data":"f3d684182abf8976e5679c997abff682cacb25f1366ea3034dad70428eb66c1e"} Nov 25 14:50:53 crc kubenswrapper[4879]: I1125 14:50:53.830556 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3d684182abf8976e5679c997abff682cacb25f1366ea3034dad70428eb66c1e" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:53.878373 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerStarted","Data":"c89cef2a67500649d1fa65ef9421900c98cffa9d639f4dcb0328bc851c682a4e"} Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:53.948647 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.712463287 podStartE2EDuration="11.948622534s" podCreationTimestamp="2025-11-25 14:50:42 +0000 UTC" firstStartedPulling="2025-11-25 14:50:44.004719768 +0000 UTC m=+1535.608132839" lastFinishedPulling="2025-11-25 14:50:49.240879015 +0000 UTC m=+1540.844292086" observedRunningTime="2025-11-25 14:50:53.912591471 +0000 UTC m=+1545.516004572" watchObservedRunningTime="2025-11-25 14:50:53.948622534 +0000 UTC m=+1545.552035605" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.538037 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.650076 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654438 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654658 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654740 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654795 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654881 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.654924 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2gf5\" (UniqueName: \"kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5\") pod \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\" (UID: \"0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2\") " Nov 25 14:50:54 crc kubenswrapper[4879]: E1125 14:50:54.662990 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894bf77b_065e_4330_b45e_cf11903b9b5b.slice/crio-e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894bf77b_065e_4330_b45e_cf11903b9b5b.slice/crio-conmon-e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894bf77b_065e_4330_b45e_cf11903b9b5b.slice/crio-conmon-305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod894bf77b_065e_4330_b45e_cf11903b9b5b.slice/crio-conmon-e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3.scope\": RecentStats: unable to find data in memory cache]" Nov 25 14:50:54 crc kubenswrapper[4879]: 
I1125 14:50:54.711838 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5" (OuterVolumeSpecName: "kube-api-access-k2gf5") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "kube-api-access-k2gf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.762542 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2gf5\" (UniqueName: \"kubernetes.io/projected/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-kube-api-access-k2gf5\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.810089 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.845615 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.868993 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.869032 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.869781 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config" (OuterVolumeSpecName: "config") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.896592 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.897423 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" (UID: "0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.980909 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.980934 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.980946 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995066 4879 generic.go:334] "Generic (PLEG): container finished" podID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerID="305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2" exitCode=0 Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995094 4879 generic.go:334] "Generic (PLEG): container finished" podID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerID="e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf" exitCode=2 Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995102 4879 generic.go:334] "Generic (PLEG): container finished" podID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerID="68b67372f1b83df26bf2e310bb04308ab0f841ffbb6e77c1cdb91b15e8d05f37" exitCode=0 Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995109 4879 generic.go:334] "Generic (PLEG): container finished" podID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerID="e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3" exitCode=0 Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerDied","Data":"305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2"} Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995573 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerDied","Data":"e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf"} Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995584 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerDied","Data":"68b67372f1b83df26bf2e310bb04308ab0f841ffbb6e77c1cdb91b15e8d05f37"} Nov 25 14:50:54 crc kubenswrapper[4879]: I1125 14:50:54.995593 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerDied","Data":"e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3"} Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.004441 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" event={"ID":"dc3c22fd-7723-4f81-af93-2cf3a150cd08","Type":"ContainerStarted","Data":"0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b"} Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.011208 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d6bd97c5-5ndpb" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.019012 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerStarted","Data":"a615bf4f3b7912a99a0cdec193303baa0cc525a4b6dd7eb9274677bbdf92573b"} Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.123185 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.132925 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.140873 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d6bd97c5-5ndpb"] Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.291519 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.291639 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2kfg\" (UniqueName: \"kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.291727 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.291805 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.291836 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.292208 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.292303 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd\") pod \"894bf77b-065e-4330-b45e-cf11903b9b5b\" (UID: \"894bf77b-065e-4330-b45e-cf11903b9b5b\") " Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.292992 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd" 
(OuterVolumeSpecName: "run-httpd") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.293349 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.298396 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg" (OuterVolumeSpecName: "kube-api-access-c2kfg") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "kube-api-access-c2kfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.314577 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts" (OuterVolumeSpecName: "scripts") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.367263 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.401535 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.401581 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.401595 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2kfg\" (UniqueName: \"kubernetes.io/projected/894bf77b-065e-4330-b45e-cf11903b9b5b-kube-api-access-c2kfg\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.401606 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/894bf77b-065e-4330-b45e-cf11903b9b5b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.401621 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.483328 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.505289 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.617274 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data" (OuterVolumeSpecName: "config-data") pod "894bf77b-065e-4330-b45e-cf11903b9b5b" (UID: "894bf77b-065e-4330-b45e-cf11903b9b5b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.664168 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" path="/var/lib/kubelet/pods/0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2/volumes" Nov 25 14:50:55 crc kubenswrapper[4879]: I1125 14:50:55.710788 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/894bf77b-065e-4330-b45e-cf11903b9b5b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.110886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"894bf77b-065e-4330-b45e-cf11903b9b5b","Type":"ContainerDied","Data":"af5d6de484b09fbb011bf123b39f5ab7a555ff8a6296fc9f246ee4015cf139e2"} Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.110974 4879 scope.go:117] "RemoveContainer" containerID="305ac93a386e6093dfe88ea6ea13e1ae350e4a4fb6216742eecd607ddca948c2" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.111102 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.130233 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerStarted","Data":"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d"} Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.130571 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.170540 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.180554 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerStarted","Data":"af243b8f2bb1e9dcb67147292116b8c6f0d542da633f05a48c0fdb0824119183"} Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.217257 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.221965 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerStarted","Data":"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745"} Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.222153 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.233164 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5fd554d876-nlf5m" podStartSLOduration=7.233138178 podStartE2EDuration="7.233138178s" podCreationTimestamp="2025-11-25 14:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:56.191987084 +0000 UTC m=+1547.795400155" watchObservedRunningTime="2025-11-25 14:50:56.233138178 +0000 UTC m=+1547.836551269" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236199 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:56 crc kubenswrapper[4879]: E1125 14:50:56.236755 4879 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="proxy-httpd" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236779 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="proxy-httpd" Nov 25 14:50:56 crc kubenswrapper[4879]: E1125 14:50:56.236807 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" containerName="init" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236818 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" containerName="init" Nov 25 14:50:56 crc kubenswrapper[4879]: E1125 14:50:56.236842 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="sg-core" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236851 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="sg-core" Nov 25 14:50:56 crc kubenswrapper[4879]: E1125 14:50:56.236869 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-central-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236877 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-central-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: E1125 14:50:56.236909 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-notification-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.236919 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-notification-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.237151 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d5c8d5c-a087-4850-ad8a-ceaf051a8ff2" containerName="init" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.237179 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="proxy-httpd" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.237199 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-central-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.237216 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="ceilometer-notification-agent" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.237228 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" containerName="sg-core" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.239284 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.242899 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.246821 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.264322 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.279017 4879 scope.go:117] "RemoveContainer" containerID="e850fe9f3ccb47be559bbfbcd667e743f203fd45fdac1d78c838ec6092768dcf" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.283491 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.2834646450000005 podStartE2EDuration="7.283464645s" podCreationTimestamp="2025-11-25 14:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:56.245322593 +0000 UTC m=+1547.848735664" watchObservedRunningTime="2025-11-25 14:50:56.283464645 +0000 UTC m=+1547.886877716" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.325965 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.326020 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.326056 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b7p5\" (UniqueName: \"kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.326113 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.326185 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.326241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc 
kubenswrapper[4879]: I1125 14:50:56.326375 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.336996 4879 scope.go:117] "RemoveContainer" containerID="68b67372f1b83df26bf2e310bb04308ab0f841ffbb6e77c1cdb91b15e8d05f37" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431664 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431727 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431750 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431776 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b7p5\" (UniqueName: \"kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431815 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431859 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.431892 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.435099 4879 scope.go:117] "RemoveContainer" containerID="e47a66dcec5ec2389118153138801966582932ef0fc74f068a3d7f33272c3cb3" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.435704 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: 
I1125 14:50:56.437438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.438272 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.438813 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.457023 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.467944 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.483295 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b7p5\" (UniqueName: \"kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5\") pod \"ceilometer-0\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.499419 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-94fd8ccd4-vp796" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.520219 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-94fd8ccd4-vp796" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.576288 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.742824 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.744511 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.747643 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-427bs" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.748003 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.761646 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.806310 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.807880 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.822474 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.839378 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.863282 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.863342 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.863420 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5hcr\" (UniqueName: \"kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.863466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.863573 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.880400 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967347 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967409 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967433 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967476 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967504 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5hcr\" (UniqueName: \"kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967664 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967684 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967708 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.967765 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd6pt\" (UniqueName: \"kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.968256 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.973724 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:56 crc kubenswrapper[4879]: I1125 14:50:56.981609 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:56.988863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:56.992791 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:56.993091 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-774db89647-jg5tc" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="dnsmasq-dns" containerID="cri-o://17138fdd64e95fa1d901748a9f1473a1161c4a782199ea9044150a64c235aa13" gracePeriod=10 Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.002023 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.004647 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.011648 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.011951 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5hcr\" (UniqueName: \"kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr\") pod \"barbican-worker-6f68cc547f-bvplz\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.060614 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.062658 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.075664 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.078222 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.078270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.078313 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.078353 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.078410 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd6pt\" (UniqueName: \"kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.081174 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: 
\"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.090549 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.092687 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.095708 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.100376 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.152653 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd6pt\" (UniqueName: \"kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt\") pod \"barbican-keystone-listener-76bcc454bb-qmx8f\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181394 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181458 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181488 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181524 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 
crc kubenswrapper[4879]: I1125 14:50:57.181556 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181639 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181673 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b698w\" (UniqueName: \"kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181734 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxfkn\" (UniqueName: \"kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181849 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.181868 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.246728 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerStarted","Data":"05364380d2e63d55c39d597c799b96575c6c47041dc20bb528f86650c9382e54"} Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.248378 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.255039 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerStarted","Data":"04433574a192b4e76f4a1047133e9f8463a81e6a8e9269e4a2083629111afa0d"} Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.291809 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxfkn\" (UniqueName: \"kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.291862 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.291887 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.291943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292011 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292039 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292075 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292106 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292207 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292251 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b698w\" (UniqueName: \"kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.292290 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.293319 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.293411 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.299478 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.300203 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.300745 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.301837 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.302083 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.302733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.306289 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.307809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.311112 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.311092464 podStartE2EDuration="6.311092464s" podCreationTimestamp="2025-11-25 14:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:57.290471245 +0000 UTC m=+1548.893884326" watchObservedRunningTime="2025-11-25 14:50:57.311092464 +0000 UTC m=+1548.914505525" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.318577 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxfkn\" (UniqueName: \"kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn\") pod \"barbican-api-5bf5d5964-kwtzr\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.328906 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b698w\" (UniqueName: \"kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w\") pod \"dnsmasq-dns-6578955fd5-psxqm\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.379669 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.429872 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.607729 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.630822 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.707549 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="894bf77b-065e-4330-b45e-cf11903b9b5b" path="/var/lib/kubelet/pods/894bf77b-065e-4330-b45e-cf11903b9b5b/volumes" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.856742 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.867332 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.873972 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.874243 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 25 14:50:57 crc kubenswrapper[4879]: I1125 14:50:57.890478 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025328 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025390 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025412 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025445 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025530 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhnpj\" (UniqueName: \"kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.025638 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128352 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhnpj\" (UniqueName: \"kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128480 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128548 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128577 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128599 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128639 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.128658 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.143527 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.144682 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.164558 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.164716 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.164785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.165288 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.182480 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhnpj\" (UniqueName: \"kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj\") pod \"neutron-66d45fd54f-j9t95\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.201730 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.229875 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.288642 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.306082 4879 generic.go:334] "Generic (PLEG): container finished" podID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerID="17138fdd64e95fa1d901748a9f1473a1161c4a782199ea9044150a64c235aa13" exitCode=0 Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.306168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerDied","Data":"17138fdd64e95fa1d901748a9f1473a1161c4a782199ea9044150a64c235aa13"} Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.307755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerStarted","Data":"4f9475d0d198120914c0e043781774eaf8f02a71021391e84a032bf8ff6d9d2e"} Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.312051 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerStarted","Data":"49e7eb55685f9b3286f992abdfc27d431d9314de3f332b4da17a6cf3f8a720c6"} Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.365166 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerStarted","Data":"41b342be3eecc98813aaf9b468892bd643aa0e34ac73a85cd3d396c5e9f92e0a"} Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.434244 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.439994 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.439976912 podStartE2EDuration="6.439976912s" podCreationTimestamp="2025-11-25 14:50:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:50:58.426762037 +0000 UTC m=+1550.030175108" watchObservedRunningTime="2025-11-25 14:50:58.439976912 +0000 UTC m=+1550.043389983" Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.528303 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.733164 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.786091 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:50:58 crc kubenswrapper[4879]: I1125 14:50:58.874900 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.038890 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.038984 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.039027 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.039065 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.039549 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.039630 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5fkw\" (UniqueName: \"kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw\") pod \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\" (UID: \"77312b8f-d676-43b5-bf82-b84f7ddb33c5\") " Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.079929 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw" (OuterVolumeSpecName: "kube-api-access-c5fkw") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "kube-api-access-c5fkw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.142563 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5fkw\" (UniqueName: \"kubernetes.io/projected/77312b8f-d676-43b5-bf82-b84f7ddb33c5-kube-api-access-c5fkw\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.245249 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.245391 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.312676 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.383447 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.385315 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.396847 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.400384 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" event={"ID":"b8ec0861-1e73-4738-8c15-157fbc5418d4","Type":"ContainerStarted","Data":"d73a3c6430764643a16f850677a4e3fd28e79e79c16a826b2156b960b10daf19"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.414160 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerStarted","Data":"d382750139818a7f50f7d0502759024e496c776a1a7c9bad5ac981a44db64c8a"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.417251 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config" (OuterVolumeSpecName: "config") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.422318 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-774db89647-jg5tc" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.422342 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-774db89647-jg5tc" event={"ID":"77312b8f-d676-43b5-bf82-b84f7ddb33c5","Type":"ContainerDied","Data":"263bb49c89503a2b102d1037dbd2611579a3288b3fcbddab165cac908df22d89"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.422396 4879 scope.go:117] "RemoveContainer" containerID="17138fdd64e95fa1d901748a9f1473a1161c4a782199ea9044150a64c235aa13" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.426425 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerStarted","Data":"d2dd424392a1048a563080d38949b43ade371b2ac15e8e2b0e87586354fdebbc"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.429193 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerStarted","Data":"faa92de00381134f57f76cfc91fabde2db6b607bff0fd5ef82014983b232a9fb"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.435309 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="cinder-scheduler" containerID="cri-o://76cb900db533acd1b9a1a1817de0b202c3c2b3883a680ae32a7f092cd5122448" gracePeriod=30 Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.435806 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerStarted","Data":"07f03abad3e8b757760025bf0305e5062df952236661383eba0f291893616873"} Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.436266 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="probe" containerID="cri-o://c89cef2a67500649d1fa65ef9421900c98cffa9d639f4dcb0328bc851c682a4e" gracePeriod=30 Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.441548 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.449617 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.449654 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.449667 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.449681 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.577522 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "77312b8f-d676-43b5-bf82-b84f7ddb33c5" (UID: "77312b8f-d676-43b5-bf82-b84f7ddb33c5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.653691 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77312b8f-d676-43b5-bf82-b84f7ddb33c5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.706345 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:50:59 crc kubenswrapper[4879]: I1125 14:50:59.922569 4879 scope.go:117] "RemoveContainer" containerID="2c8d164d2af1c326ff7281b306cee67d5093d123752567a73004d578f5d96aa2" Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.158378 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.189492 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-774db89647-jg5tc"] Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.374316 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:00 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:00 crc kubenswrapper[4879]: > Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.467250 4879 generic.go:334] "Generic (PLEG): container finished" podID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerID="7f4a59c2fffdc1d804886afcede4c4e8541a71535286b6d1bba3ae15c99c20ba" exitCode=0 Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.467352 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" event={"ID":"b8ec0861-1e73-4738-8c15-157fbc5418d4","Type":"ContainerDied","Data":"7f4a59c2fffdc1d804886afcede4c4e8541a71535286b6d1bba3ae15c99c20ba"} Nov 25 14:51:00 crc 
kubenswrapper[4879]: I1125 14:51:00.477188 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerStarted","Data":"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20"} Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.477244 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerStarted","Data":"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7"} Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.478229 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.478254 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.510898 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerStarted","Data":"28f1ee62e8f0217666143217990ac5ddfaf70ba0a32edec6f7c649da3cf673b3"} Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.533188 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5bf5d5964-kwtzr" podStartSLOduration=3.533169305 podStartE2EDuration="3.533169305s" podCreationTimestamp="2025-11-25 14:50:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:51:00.530489411 +0000 UTC m=+1552.133902482" watchObservedRunningTime="2025-11-25 14:51:00.533169305 +0000 UTC m=+1552.136582376" Nov 25 14:51:00 crc kubenswrapper[4879]: I1125 14:51:00.879901 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7kdzs"] Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.530628 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerStarted","Data":"05f0aa550451ea4ec5584e060eeaaaa5e3700f2bd21f70929ff15b059676284f"} Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.531521 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.537509 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerStarted","Data":"df6c1ccb73577cdb44c02dd9a9d0e9eb02b876ed7b84e67433ef1931821c1262"} Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.556718 4879 generic.go:334] "Generic (PLEG): container finished" podID="d6586055-fe28-486c-8733-032241ccf52c" containerID="c89cef2a67500649d1fa65ef9421900c98cffa9d639f4dcb0328bc851c682a4e" exitCode=0 Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.556799 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerDied","Data":"c89cef2a67500649d1fa65ef9421900c98cffa9d639f4dcb0328bc851c682a4e"} Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.577927 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-66d45fd54f-j9t95" 
podStartSLOduration=4.577889785 podStartE2EDuration="4.577889785s" podCreationTimestamp="2025-11-25 14:50:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:51:01.561401933 +0000 UTC m=+1553.164815004" watchObservedRunningTime="2025-11-25 14:51:01.577889785 +0000 UTC m=+1553.181302856" Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.578201 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" event={"ID":"b8ec0861-1e73-4738-8c15-157fbc5418d4","Type":"ContainerStarted","Data":"156acc7c316592f8270ffab312637d1aa68721d3bb28b77e78eefde693e3b452"} Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.578354 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.608821 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" podStartSLOduration=5.608800584 podStartE2EDuration="5.608800584s" podCreationTimestamp="2025-11-25 14:50:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:51:01.601371005 +0000 UTC m=+1553.204784086" watchObservedRunningTime="2025-11-25 14:51:01.608800584 +0000 UTC m=+1553.212213655" Nov 25 14:51:01 crc kubenswrapper[4879]: I1125 14:51:01.721590 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" path="/var/lib/kubelet/pods/77312b8f-d676-43b5-bf82-b84f7ddb33c5/volumes" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.058011 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:51:02 crc kubenswrapper[4879]: E1125 14:51:02.058528 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="dnsmasq-dns" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.058551 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="dnsmasq-dns" Nov 25 14:51:02 crc kubenswrapper[4879]: E1125 14:51:02.058568 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="init" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.058577 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="init" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.058839 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="77312b8f-d676-43b5-bf82-b84f7ddb33c5" containerName="dnsmasq-dns" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.060175 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.064599 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.064759 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.085476 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144674 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144730 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144768 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h27x\" (UniqueName: \"kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144822 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144841 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.144889 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.240836 4879 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.241358 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246239 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246326 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246367 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h27x\" (UniqueName: \"kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246422 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246459 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.246478 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.248866 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.252798 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.254640 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.256607 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.262843 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.275895 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h27x\" (UniqueName: \"kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.290551 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs\") pod \"barbican-api-7b6ffb6c8b-jdb4h\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.328241 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.385815 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.389241 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.602067 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7kdzs" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="registry-server" containerID="cri-o://cd26bef6451a151cf7d6475fb5b89a3f24ea96186c01549c08a6edd44d4b67f8" gracePeriod=2 Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.602385 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 14:51:02 crc kubenswrapper[4879]: I1125 14:51:02.602409 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.074279 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:03 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:03 crc kubenswrapper[4879]: > Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.650870 4879 generic.go:334] "Generic (PLEG): container finished" podID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerID="cd26bef6451a151cf7d6475fb5b89a3f24ea96186c01549c08a6edd44d4b67f8" exitCode=0 Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.665038 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerDied","Data":"cd26bef6451a151cf7d6475fb5b89a3f24ea96186c01549c08a6edd44d4b67f8"} Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.671404 4879 generic.go:334] "Generic (PLEG): container finished" podID="d6586055-fe28-486c-8733-032241ccf52c" containerID="76cb900db533acd1b9a1a1817de0b202c3c2b3883a680ae32a7f092cd5122448" exitCode=0 Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.671500 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerDied","Data":"76cb900db533acd1b9a1a1817de0b202c3c2b3883a680ae32a7f092cd5122448"} Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.707226 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.707282 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.798660 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.801024 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.854221 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 25 14:51:03 crc kubenswrapper[4879]: I1125 14:51:03.956088 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Nov 25 14:51:04 crc kubenswrapper[4879]: I1125 14:51:04.680756 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:51:04 crc kubenswrapper[4879]: I1125 14:51:04.681072 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:51:04 crc kubenswrapper[4879]: I1125 14:51:04.681389 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:04 crc kubenswrapper[4879]: I1125 14:51:04.681447 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:05 crc kubenswrapper[4879]: I1125 14:51:05.169945 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 14:51:05 crc kubenswrapper[4879]: I1125 14:51:05.688841 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:51:05 crc kubenswrapper[4879]: I1125 14:51:05.807312 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.037805 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.156008 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ng8mx\" (UniqueName: \"kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx\") pod \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.156238 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content\") pod \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.156333 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities\") pod \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\" (UID: \"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.157197 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities" (OuterVolumeSpecName: "utilities") pod "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" (UID: "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.168181 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx" (OuterVolumeSpecName: "kube-api-access-ng8mx") pod "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" (UID: "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e"). InnerVolumeSpecName "kube-api-access-ng8mx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.254118 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" (UID: "8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.262414 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.262455 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.262465 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ng8mx\" (UniqueName: \"kubernetes.io/projected/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e-kube-api-access-ng8mx\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.772293 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7kdzs" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.772369 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7kdzs" event={"ID":"8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e","Type":"ContainerDied","Data":"f61807d800a523eb60ef669ed0a0ee6c27196ac01b650534b03debacb3929e60"} Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.773274 4879 scope.go:117] "RemoveContainer" containerID="cd26bef6451a151cf7d6475fb5b89a3f24ea96186c01549c08a6edd44d4b67f8" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.797647 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.918180 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7kdzs"] Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.935495 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7kdzs"] Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.978799 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.978935 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.978974 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.979019 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.979043 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zmz4\" (UniqueName: \"kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.979070 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id\") pod \"d6586055-fe28-486c-8733-032241ccf52c\" (UID: \"d6586055-fe28-486c-8733-032241ccf52c\") " Nov 25 14:51:06 crc kubenswrapper[4879]: I1125 14:51:06.979484 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:06.999344 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts" (OuterVolumeSpecName: "scripts") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.000363 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4" (OuterVolumeSpecName: "kube-api-access-4zmz4") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "kube-api-access-4zmz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.014880 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.081860 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.081894 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.081909 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zmz4\" (UniqueName: \"kubernetes.io/projected/d6586055-fe28-486c-8733-032241ccf52c-kube-api-access-4zmz4\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.081921 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d6586055-fe28-486c-8733-032241ccf52c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.190485 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.220624 4879 scope.go:117] "RemoveContainer" containerID="489841467fceac4c06449f6b44bb905a9281d72e2fa5d56769ca6ab00da80995" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.220625 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.226562 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data" (OuterVolumeSpecName: "config-data") pod "d6586055-fe28-486c-8733-032241ccf52c" (UID: "d6586055-fe28-486c-8733-032241ccf52c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.291514 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.291555 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6586055-fe28-486c-8733-032241ccf52c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.383350 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.470793 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.471047 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" containerID="cri-o://fd2926ff8937b1c81e60f15d2f7dd9934446409095fca0316db0a6cc9bd87c41" gracePeriod=10 Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.541790 4879 scope.go:117] "RemoveContainer" containerID="e1a0ed94e274e01c0a03589235b6fc884ecd3d5b6fab6cb2dd4d3bfb1dc4c2d6" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.665363 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" path="/var/lib/kubelet/pods/8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e/volumes" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.799470 4879 generic.go:334] "Generic (PLEG): container finished" podID="9da17217-b666-43b4-8061-84cdd4cedeac" containerID="fd2926ff8937b1c81e60f15d2f7dd9934446409095fca0316db0a6cc9bd87c41" exitCode=0 Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.799529 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" event={"ID":"9da17217-b666-43b4-8061-84cdd4cedeac","Type":"ContainerDied","Data":"fd2926ff8937b1c81e60f15d2f7dd9934446409095fca0316db0a6cc9bd87c41"} Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.824417 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerStarted","Data":"4bbe35807a1a8ba04bc01121bc68265add08a5e4d443ccd4ad29d5ac1402a13e"} Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.834372 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerStarted","Data":"860ae33e8db0f2927b43ebc352e3a7345732946f001b596eec9ff56b2f703de7"} Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.843586 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerStarted","Data":"a80540e1f8d0795a7d0f08a859ed3eccf014e5be89d1ab61902666a3115d369e"} Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.857536 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"d6586055-fe28-486c-8733-032241ccf52c","Type":"ContainerDied","Data":"eb8ac9df4846e030cc4f99022307ea1ef3e9b61bde8b6441e728a6b636224a8d"} 
Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.857632 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.910501 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.927010 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944179 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:51:07 crc kubenswrapper[4879]: E1125 14:51:07.944624 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="probe" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944641 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="probe" Nov 25 14:51:07 crc kubenswrapper[4879]: E1125 14:51:07.944651 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="registry-server" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944658 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="registry-server" Nov 25 14:51:07 crc kubenswrapper[4879]: E1125 14:51:07.944676 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="cinder-scheduler" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944683 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="cinder-scheduler" Nov 25 14:51:07 crc kubenswrapper[4879]: E1125 14:51:07.944693 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="extract-utilities" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944698 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="extract-utilities" Nov 25 14:51:07 crc kubenswrapper[4879]: E1125 14:51:07.944710 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="extract-content" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944715 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="extract-content" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944901 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e9bd5a1-f7be-4b52-9d78-0bdf90872f1e" containerName="registry-server" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944924 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="cinder-scheduler" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.944951 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6586055-fe28-486c-8733-032241ccf52c" containerName="probe" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.946009 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.952239 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:51:07 crc kubenswrapper[4879]: I1125 14:51:07.953643 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.100773 4879 scope.go:117] "RemoveContainer" containerID="c89cef2a67500649d1fa65ef9421900c98cffa9d639f4dcb0328bc851c682a4e" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120107 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120206 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-746dz\" (UniqueName: \"kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120265 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120313 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120364 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.120399 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.222852 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.222921 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-746dz\" (UniqueName: \"kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz\") pod \"cinder-scheduler-0\" 
(UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.222980 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.223038 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.223086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.223146 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.223240 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.229830 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.232406 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.232715 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.233196 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.252259 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-746dz\" (UniqueName: 
\"kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz\") pod \"cinder-scheduler-0\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.306041 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.331780 4879 scope.go:117] "RemoveContainer" containerID="76cb900db533acd1b9a1a1817de0b202c3c2b3883a680ae32a7f092cd5122448" Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.905018 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerStarted","Data":"8bab8985c7010c6b6dd0f0feb2d6a0fbbc3be6f8716197ac4c3a366eb018ce99"} Nov 25 14:51:08 crc kubenswrapper[4879]: I1125 14:51:08.933840 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6f68cc547f-bvplz" podStartSLOduration=4.979323766 podStartE2EDuration="12.93382612s" podCreationTimestamp="2025-11-25 14:50:56 +0000 UTC" firstStartedPulling="2025-11-25 14:50:58.641876764 +0000 UTC m=+1550.245289835" lastFinishedPulling="2025-11-25 14:51:06.596379108 +0000 UTC m=+1558.199792189" observedRunningTime="2025-11-25 14:51:08.932932206 +0000 UTC m=+1560.536345277" watchObservedRunningTime="2025-11-25 14:51:08.93382612 +0000 UTC m=+1560.537239191" Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.122846 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.346761 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.347205 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.357014 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.700063 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6586055-fe28-486c-8733-032241ccf52c" path="/var/lib/kubelet/pods/d6586055-fe28-486c-8733-032241ccf52c/volumes" Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.939886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerStarted","Data":"c51c72ad6482c9685201169ad13a140393c0247ab057ae8f9ea66c4f0b992e3f"} Nov 25 14:51:09 crc kubenswrapper[4879]: I1125 14:51:09.941879 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerStarted","Data":"0085d2813cd8a19fc5a40a396d0d21eeddfa493fb910f17688db09fd0fd10a5e"} Nov 25 14:51:10 crc kubenswrapper[4879]: I1125 14:51:10.218566 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:10 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:10 crc kubenswrapper[4879]: > Nov 25 14:51:10 crc kubenswrapper[4879]: I1125 14:51:10.986360 4879 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerStarted","Data":"dd9719475c4335a86036dc249bfd8e7b5fc7dc1a22a25d830de6783cc81e7d70"} Nov 25 14:51:11 crc kubenswrapper[4879]: I1125 14:51:11.484330 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5bf5d5964-kwtzr" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:51:11 crc kubenswrapper[4879]: I1125 14:51:11.950540 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:12 crc kubenswrapper[4879]: I1125 14:51:12.030777 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" podStartSLOduration=7.649255929 podStartE2EDuration="16.030759587s" podCreationTimestamp="2025-11-25 14:50:56 +0000 UTC" firstStartedPulling="2025-11-25 14:50:58.213055851 +0000 UTC m=+1549.816468922" lastFinishedPulling="2025-11-25 14:51:06.594559509 +0000 UTC m=+1558.197972580" observedRunningTime="2025-11-25 14:51:12.025325831 +0000 UTC m=+1563.628738902" watchObservedRunningTime="2025-11-25 14:51:12.030759587 +0000 UTC m=+1563.634172658" Nov 25 14:51:12 crc kubenswrapper[4879]: I1125 14:51:12.476303 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5bf5d5964-kwtzr" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:51:13 crc kubenswrapper[4879]: I1125 14:51:13.047163 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:13 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:13 crc kubenswrapper[4879]: > Nov 25 14:51:14 crc kubenswrapper[4879]: I1125 14:51:14.322897 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:16 crc kubenswrapper[4879]: I1125 14:51:16.823776 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.152:5353: i/o timeout" Nov 25 14:51:18 crc kubenswrapper[4879]: W1125 14:51:18.236400 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd903399_aa23_4f0d_93fc_4c7a5f454750.slice/crio-cbddf5204eb85902d3478d0bbd23c509d624d02bd2e4159d19a7d70a3dafa120 WatchSource:0}: Error finding container cbddf5204eb85902d3478d0bbd23c509d624d02bd2e4159d19a7d70a3dafa120: Status 404 returned error can't find the container with id cbddf5204eb85902d3478d0bbd23c509d624d02bd2e4159d19a7d70a3dafa120 Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.439648 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.553992 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.554514 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t82px\" (UniqueName: \"kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.554593 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.554651 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.554740 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.554798 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc\") pod \"9da17217-b666-43b4-8061-84cdd4cedeac\" (UID: \"9da17217-b666-43b4-8061-84cdd4cedeac\") " Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.609268 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px" (OuterVolumeSpecName: "kube-api-access-t82px") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "kube-api-access-t82px". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.661278 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t82px\" (UniqueName: \"kubernetes.io/projected/9da17217-b666-43b4-8061-84cdd4cedeac-kube-api-access-t82px\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.665872 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config" (OuterVolumeSpecName: "config") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.684442 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.695713 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.706902 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.731035 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9da17217-b666-43b4-8061-84cdd4cedeac" (UID: "9da17217-b666-43b4-8061-84cdd4cedeac"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.773808 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.773841 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.773853 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.773865 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:18 crc kubenswrapper[4879]: I1125 14:51:18.773876 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9da17217-b666-43b4-8061-84cdd4cedeac-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:19 crc kubenswrapper[4879]: E1125 14:51:19.019370 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified" Nov 25 14:51:19 crc kubenswrapper[4879]: E1125 14:51:19.019804 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:nova-cell0-conductor-db-sync,Image:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-66g2w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-h2tdk_openstack(dc3c22fd-7723-4f81-af93-2cf3a150cd08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 14:51:19 crc kubenswrapper[4879]: E1125 14:51:19.021016 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.076952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" event={"ID":"9da17217-b666-43b4-8061-84cdd4cedeac","Type":"ContainerDied","Data":"f3b991d5b35d7f8aa9f550f5fd03581f9d736b865e7df3060ec18a5895e45bbd"} Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.077039 4879 scope.go:117] "RemoveContainer" containerID="fd2926ff8937b1c81e60f15d2f7dd9934446409095fca0316db0a6cc9bd87c41" Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.078468 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.083808 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerStarted","Data":"cbddf5204eb85902d3478d0bbd23c509d624d02bd2e4159d19a7d70a3dafa120"} Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.095949 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerStarted","Data":"a140b33b646cb8bebf32e5c01b72ae0d72d960de2557e0cf5cf92d5509342139"} Nov 25 14:51:19 crc kubenswrapper[4879]: E1125 14:51:19.142194 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.142548 4879 scope.go:117] "RemoveContainer" containerID="ddd11c35fb60cfa566240ed27ace139dbeccacdd9b30b3ea18f885624d4e09ee" Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.176759 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.192087 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-gzfqq"] Nov 25 14:51:19 crc kubenswrapper[4879]: I1125 14:51:19.664784 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" path="/var/lib/kubelet/pods/9da17217-b666-43b4-8061-84cdd4cedeac/volumes" Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.111671 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerStarted","Data":"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750"} Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.118093 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.119430 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.127939 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:20 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:20 crc kubenswrapper[4879]: > Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.136675 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" podStartSLOduration=18.13666196 podStartE2EDuration="18.13666196s" podCreationTimestamp="2025-11-25 14:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:51:20.136188498 +0000 UTC m=+1571.739601569" watchObservedRunningTime="2025-11-25 14:51:20.13666196 +0000 UTC m=+1571.740075031" Nov 25 14:51:20 crc 
kubenswrapper[4879]: I1125 14:51:20.364440 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-5fd554d876-nlf5m" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.364929 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-5fd554d876-nlf5m" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:51:20 crc kubenswrapper[4879]: I1125 14:51:20.365416 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-5fd554d876-nlf5m" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 25 14:51:21 crc kubenswrapper[4879]: I1125 14:51:21.133881 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerStarted","Data":"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc"} Nov 25 14:51:21 crc kubenswrapper[4879]: I1125 14:51:21.175997 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=14.175904471 podStartE2EDuration="14.175904471s" podCreationTimestamp="2025-11-25 14:51:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:51:21.168486142 +0000 UTC m=+1572.771899223" watchObservedRunningTime="2025-11-25 14:51:21.175904471 +0000 UTC m=+1572.779317542" Nov 25 14:51:21 crc kubenswrapper[4879]: I1125 14:51:21.825526 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-gzfqq" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.152:5353: i/o timeout" Nov 25 14:51:22 crc kubenswrapper[4879]: I1125 14:51:22.153345 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 14:51:22 crc kubenswrapper[4879]: I1125 14:51:22.187737 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:23 crc kubenswrapper[4879]: I1125 14:51:23.026014 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:23 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:23 crc kubenswrapper[4879]: > Nov 25 14:51:23 crc kubenswrapper[4879]: I1125 14:51:23.306672 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 25 14:51:25 crc kubenswrapper[4879]: I1125 14:51:25.040651 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:51:25 crc kubenswrapper[4879]: I1125 14:51:25.115263 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:51:25 crc kubenswrapper[4879]: I1125 14:51:25.115559 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5bf5d5964-kwtzr" 
podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" containerID="cri-o://6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7" gracePeriod=30 Nov 25 14:51:25 crc kubenswrapper[4879]: I1125 14:51:25.115695 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5bf5d5964-kwtzr" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api" containerID="cri-o://4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20" gracePeriod=30 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.200665 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerStarted","Data":"8b0651b2a4445fb3a16e6609eeef910129ced4e588cd6908bbd07c6c888e7d31"} Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.201033 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.200827 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="proxy-httpd" containerID="cri-o://8b0651b2a4445fb3a16e6609eeef910129ced4e588cd6908bbd07c6c888e7d31" gracePeriod=30 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.200815 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-central-agent" containerID="cri-o://faa92de00381134f57f76cfc91fabde2db6b607bff0fd5ef82014983b232a9fb" gracePeriod=30 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.200831 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="sg-core" containerID="cri-o://c51c72ad6482c9685201169ad13a140393c0247ab057ae8f9ea66c4f0b992e3f" gracePeriod=30 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.200860 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-notification-agent" containerID="cri-o://df6c1ccb73577cdb44c02dd9a9d0e9eb02b876ed7b84e67433ef1931821c1262" gracePeriod=30 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.204661 4879 generic.go:334] "Generic (PLEG): container finished" podID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerID="6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7" exitCode=143 Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.204702 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerDied","Data":"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7"} Nov 25 14:51:26 crc kubenswrapper[4879]: I1125 14:51:26.227470 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.3879989249999998 podStartE2EDuration="30.22745605s" podCreationTimestamp="2025-11-25 14:50:56 +0000 UTC" firstStartedPulling="2025-11-25 14:50:57.740319938 +0000 UTC m=+1549.343733009" lastFinishedPulling="2025-11-25 14:51:24.579777063 +0000 UTC m=+1576.183190134" observedRunningTime="2025-11-25 14:51:26.225743164 +0000 UTC m=+1577.829156235" watchObservedRunningTime="2025-11-25 
14:51:26.22745605 +0000 UTC m=+1577.830869121" Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216708 4879 generic.go:334] "Generic (PLEG): container finished" podID="ec415456-05f9-4430-ae5f-bf197f25e085" containerID="8b0651b2a4445fb3a16e6609eeef910129ced4e588cd6908bbd07c6c888e7d31" exitCode=0 Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216737 4879 generic.go:334] "Generic (PLEG): container finished" podID="ec415456-05f9-4430-ae5f-bf197f25e085" containerID="c51c72ad6482c9685201169ad13a140393c0247ab057ae8f9ea66c4f0b992e3f" exitCode=2 Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216746 4879 generic.go:334] "Generic (PLEG): container finished" podID="ec415456-05f9-4430-ae5f-bf197f25e085" containerID="faa92de00381134f57f76cfc91fabde2db6b607bff0fd5ef82014983b232a9fb" exitCode=0 Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216766 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerDied","Data":"8b0651b2a4445fb3a16e6609eeef910129ced4e588cd6908bbd07c6c888e7d31"} Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216790 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerDied","Data":"c51c72ad6482c9685201169ad13a140393c0247ab057ae8f9ea66c4f0b992e3f"} Nov 25 14:51:27 crc kubenswrapper[4879]: I1125 14:51:27.216799 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerDied","Data":"faa92de00381134f57f76cfc91fabde2db6b607bff0fd5ef82014983b232a9fb"} Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.228452 4879 generic.go:334] "Generic (PLEG): container finished" podID="ec415456-05f9-4430-ae5f-bf197f25e085" containerID="df6c1ccb73577cdb44c02dd9a9d0e9eb02b876ed7b84e67433ef1931821c1262" exitCode=0 Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.228532 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerDied","Data":"df6c1ccb73577cdb44c02dd9a9d0e9eb02b876ed7b84e67433ef1931821c1262"} Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.607588 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5bf5d5964-kwtzr" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:57236->10.217.0.182:9311: read: connection reset by peer" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.607648 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5bf5d5964-kwtzr" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.182:9311/healthcheck\": read tcp 10.217.0.2:57238->10.217.0.182:9311: read: connection reset by peer" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.687683 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.722239 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.756889 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.757154 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5fd554d876-nlf5m" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-api" containerID="cri-o://7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a" gracePeriod=30 Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.757362 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5fd554d876-nlf5m" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" containerID="cri-o://f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d" gracePeriod=30 Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.786575 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.797138 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.880762 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.880812 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.880905 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.880936 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b7p5\" (UniqueName: \"kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.881082 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.881262 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: \"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.881313 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts\") pod \"ec415456-05f9-4430-ae5f-bf197f25e085\" (UID: 
\"ec415456-05f9-4430-ae5f-bf197f25e085\") " Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.882342 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.883509 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.910314 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts" (OuterVolumeSpecName: "scripts") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.924294 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5" (OuterVolumeSpecName: "kube-api-access-8b7p5") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "kube-api-access-8b7p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.982085 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.983451 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.983475 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.983488 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.983501 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b7p5\" (UniqueName: \"kubernetes.io/projected/ec415456-05f9-4430-ae5f-bf197f25e085-kube-api-access-8b7p5\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:28 crc kubenswrapper[4879]: I1125 14:51:28.983512 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec415456-05f9-4430-ae5f-bf197f25e085-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.001973 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.074369 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data" (OuterVolumeSpecName: "config-data") pod "ec415456-05f9-4430-ae5f-bf197f25e085" (UID: "ec415456-05f9-4430-ae5f-bf197f25e085"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.085225 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.085397 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec415456-05f9-4430-ae5f-bf197f25e085-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.224675 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.248730 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ec415456-05f9-4430-ae5f-bf197f25e085","Type":"ContainerDied","Data":"4f9475d0d198120914c0e043781774eaf8f02a71021391e84a032bf8ff6d9d2e"} Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.248783 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.248794 4879 scope.go:117] "RemoveContainer" containerID="8b0651b2a4445fb3a16e6609eeef910129ced4e588cd6908bbd07c6c888e7d31" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.255885 4879 generic.go:334] "Generic (PLEG): container finished" podID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerID="4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20" exitCode=0 Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.255932 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerDied","Data":"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20"} Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.255958 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5bf5d5964-kwtzr" event={"ID":"5614ea68-0fbe-4773-802b-4e982dd987a5","Type":"ContainerDied","Data":"d382750139818a7f50f7d0502759024e496c776a1a7c9bad5ac981a44db64c8a"} Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.256021 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5bf5d5964-kwtzr" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.286732 4879 scope.go:117] "RemoveContainer" containerID="c51c72ad6482c9685201169ad13a140393c0247ab057ae8f9ea66c4f0b992e3f" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.288091 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom\") pod \"5614ea68-0fbe-4773-802b-4e982dd987a5\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.292720 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle\") pod \"5614ea68-0fbe-4773-802b-4e982dd987a5\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.292895 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxfkn\" (UniqueName: \"kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn\") pod \"5614ea68-0fbe-4773-802b-4e982dd987a5\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.293360 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs\") pod \"5614ea68-0fbe-4773-802b-4e982dd987a5\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.293457 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data\") pod \"5614ea68-0fbe-4773-802b-4e982dd987a5\" (UID: \"5614ea68-0fbe-4773-802b-4e982dd987a5\") " Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.297830 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5614ea68-0fbe-4773-802b-4e982dd987a5" 
(UID: "5614ea68-0fbe-4773-802b-4e982dd987a5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.298285 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs" (OuterVolumeSpecName: "logs") pod "5614ea68-0fbe-4773-802b-4e982dd987a5" (UID: "5614ea68-0fbe-4773-802b-4e982dd987a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.299686 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn" (OuterVolumeSpecName: "kube-api-access-xxfkn") pod "5614ea68-0fbe-4773-802b-4e982dd987a5" (UID: "5614ea68-0fbe-4773-802b-4e982dd987a5"). InnerVolumeSpecName "kube-api-access-xxfkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.316268 4879 scope.go:117] "RemoveContainer" containerID="df6c1ccb73577cdb44c02dd9a9d0e9eb02b876ed7b84e67433ef1931821c1262" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.340447 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5614ea68-0fbe-4773-802b-4e982dd987a5" (UID: "5614ea68-0fbe-4773-802b-4e982dd987a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.344142 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.376520 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.384348 4879 scope.go:117] "RemoveContainer" containerID="faa92de00381134f57f76cfc91fabde2db6b607bff0fd5ef82014983b232a9fb" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.395616 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data" (OuterVolumeSpecName: "config-data") pod "5614ea68-0fbe-4773-802b-4e982dd987a5" (UID: "5614ea68-0fbe-4773-802b-4e982dd987a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.397647 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.397683 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxfkn\" (UniqueName: \"kubernetes.io/projected/5614ea68-0fbe-4773-802b-4e982dd987a5-kube-api-access-xxfkn\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.397699 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5614ea68-0fbe-4773-802b-4e982dd987a5-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.397713 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.397722 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5614ea68-0fbe-4773-802b-4e982dd987a5-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.402010 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.402582 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.404395 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.404513 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="proxy-httpd" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.404587 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="proxy-httpd" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.404679 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-notification-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.404756 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-notification-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.404891 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.404997 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.405094 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.405180 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" Nov 25 14:51:29 crc 
kubenswrapper[4879]: E1125 14:51:29.405259 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="init" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.405313 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="init" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.405413 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-central-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.405468 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-central-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.405542 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="sg-core" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.405612 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="sg-core" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406016 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="proxy-httpd" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406115 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api-log" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406257 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-notification-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406338 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="ceilometer-central-agent" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406423 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" containerName="barbican-api" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406507 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" containerName="sg-core" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.406591 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9da17217-b666-43b4-8061-84cdd4cedeac" containerName="dnsmasq-dns" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.409019 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.413023 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.413342 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.413611 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.423337 4879 scope.go:117] "RemoveContainer" containerID="4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499185 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499546 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499580 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499672 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499710 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d2m2\" (UniqueName: \"kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499758 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.499818 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.518372 4879 scope.go:117] "RemoveContainer" containerID="6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 
14:51:29.577093 4879 scope.go:117] "RemoveContainer" containerID="4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.577608 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20\": container with ID starting with 4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20 not found: ID does not exist" containerID="4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.577708 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20"} err="failed to get container status \"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20\": rpc error: code = NotFound desc = could not find container \"4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20\": container with ID starting with 4e8a8d088d51bfad06fc6265efa363353d62e29fda8fc58019eaa6e2258f2c20 not found: ID does not exist" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.577783 4879 scope.go:117] "RemoveContainer" containerID="6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7" Nov 25 14:51:29 crc kubenswrapper[4879]: E1125 14:51:29.577998 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7\": container with ID starting with 6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7 not found: ID does not exist" containerID="6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.578087 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7"} err="failed to get container status \"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7\": rpc error: code = NotFound desc = could not find container \"6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7\": container with ID starting with 6ad39b409c0db5e2e7c41cbfff5e196918e59bf5adc58beea61bbb0c40fb4eb7 not found: ID does not exist" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602482 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602524 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602545 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602616 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602643 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d2m2\" (UniqueName: \"kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602679 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.602716 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.603691 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.604048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.609111 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.609365 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.609864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.610809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.628521 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5d2m2\" (UniqueName: \"kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2\") pod \"ceilometer-0\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " pod="openstack/ceilometer-0" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.633168 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.639918 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5bf5d5964-kwtzr"] Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.658964 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5614ea68-0fbe-4773-802b-4e982dd987a5" path="/var/lib/kubelet/pods/5614ea68-0fbe-4773-802b-4e982dd987a5/volumes" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.659675 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec415456-05f9-4430-ae5f-bf197f25e085" path="/var/lib/kubelet/pods/ec415456-05f9-4430-ae5f-bf197f25e085/volumes" Nov 25 14:51:29 crc kubenswrapper[4879]: I1125 14:51:29.746020 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:30 crc kubenswrapper[4879]: I1125 14:51:30.150017 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:30 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:30 crc kubenswrapper[4879]: > Nov 25 14:51:30 crc kubenswrapper[4879]: I1125 14:51:30.254063 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:30 crc kubenswrapper[4879]: W1125 14:51:30.259196 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03c753e2_f42e_404c_9d65_e46cc370997d.slice/crio-480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10 WatchSource:0}: Error finding container 480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10: Status 404 returned error can't find the container with id 480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10 Nov 25 14:51:30 crc kubenswrapper[4879]: I1125 14:51:30.273114 4879 generic.go:334] "Generic (PLEG): container finished" podID="5cde15fa-ca05-443f-b71d-365e01384142" containerID="f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d" exitCode=0 Nov 25 14:51:30 crc kubenswrapper[4879]: I1125 14:51:30.273207 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerDied","Data":"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d"} Nov 25 14:51:31 crc kubenswrapper[4879]: I1125 14:51:31.043698 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:31 crc kubenswrapper[4879]: I1125 14:51:31.287166 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerStarted","Data":"84b6a2ca23202770c56d86c1035c27872867bfe1a3b79394726ddc3841987f01"} Nov 25 14:51:31 crc kubenswrapper[4879]: I1125 14:51:31.287492 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerStarted","Data":"480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10"} Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.220916 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.301665 4879 generic.go:334] "Generic (PLEG): container finished" podID="5cde15fa-ca05-443f-b71d-365e01384142" containerID="7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a" exitCode=0 Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.301722 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerDied","Data":"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a"} Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.301755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd554d876-nlf5m" event={"ID":"5cde15fa-ca05-443f-b71d-365e01384142","Type":"ContainerDied","Data":"ace7b2b72ae1f9f7d46b15a08f7375bee4ae4be1fda480a863e85408b03d4d59"} Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.301779 4879 scope.go:117] "RemoveContainer" containerID="f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.302421 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5fd554d876-nlf5m" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.318669 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fkzv\" (UniqueName: \"kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv\") pod \"5cde15fa-ca05-443f-b71d-365e01384142\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.320167 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs\") pod \"5cde15fa-ca05-443f-b71d-365e01384142\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.320330 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config\") pod \"5cde15fa-ca05-443f-b71d-365e01384142\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.320405 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle\") pod \"5cde15fa-ca05-443f-b71d-365e01384142\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.320722 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config\") pod \"5cde15fa-ca05-443f-b71d-365e01384142\" (UID: \"5cde15fa-ca05-443f-b71d-365e01384142\") " Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.346367 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config" 
(OuterVolumeSpecName: "httpd-config") pod "5cde15fa-ca05-443f-b71d-365e01384142" (UID: "5cde15fa-ca05-443f-b71d-365e01384142"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.346419 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv" (OuterVolumeSpecName: "kube-api-access-7fkzv") pod "5cde15fa-ca05-443f-b71d-365e01384142" (UID: "5cde15fa-ca05-443f-b71d-365e01384142"). InnerVolumeSpecName "kube-api-access-7fkzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.346486 4879 scope.go:117] "RemoveContainer" containerID="7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.398565 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config" (OuterVolumeSpecName: "config") pod "5cde15fa-ca05-443f-b71d-365e01384142" (UID: "5cde15fa-ca05-443f-b71d-365e01384142"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.413405 4879 scope.go:117] "RemoveContainer" containerID="f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.413502 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cde15fa-ca05-443f-b71d-365e01384142" (UID: "5cde15fa-ca05-443f-b71d-365e01384142"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:32 crc kubenswrapper[4879]: E1125 14:51:32.416094 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d\": container with ID starting with f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d not found: ID does not exist" containerID="f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.416194 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d"} err="failed to get container status \"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d\": rpc error: code = NotFound desc = could not find container \"f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d\": container with ID starting with f4118bd9bc8a230eced78fbf9efd91c1d31f99170492f167da65bade00e1fe2d not found: ID does not exist" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.416254 4879 scope.go:117] "RemoveContainer" containerID="7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a" Nov 25 14:51:32 crc kubenswrapper[4879]: E1125 14:51:32.418080 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a\": container with ID starting with 7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a not found: ID does not exist" containerID="7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.418209 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a"} err="failed to get container status \"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a\": rpc error: code = NotFound desc = could not find container \"7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a\": container with ID starting with 7c517ab670e574e8f2c3e678db0e37e367f1908de6f48dacab96f9362c976b5a not found: ID does not exist" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.426212 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.426264 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.426284 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.426296 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fkzv\" (UniqueName: \"kubernetes.io/projected/5cde15fa-ca05-443f-b71d-365e01384142-kube-api-access-7fkzv\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.434627 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "5cde15fa-ca05-443f-b71d-365e01384142" (UID: "5cde15fa-ca05-443f-b71d-365e01384142"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.528352 4879 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cde15fa-ca05-443f-b71d-365e01384142-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.639499 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:51:32 crc kubenswrapper[4879]: I1125 14:51:32.648785 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5fd554d876-nlf5m"] Nov 25 14:51:33 crc kubenswrapper[4879]: I1125 14:51:33.110607 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:33 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:33 crc kubenswrapper[4879]: > Nov 25 14:51:33 crc kubenswrapper[4879]: I1125 14:51:33.313813 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerStarted","Data":"63f4b205ecd54c9bcf210196a87df986e2472d90896a51e546fd5c05d2574473"} Nov 25 14:51:33 crc kubenswrapper[4879]: I1125 14:51:33.313858 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerStarted","Data":"43d6def0377bafb29b5c10eb1df77b870221fb7bd30f2eaad8a2c95a040138a3"} Nov 25 14:51:33 crc kubenswrapper[4879]: I1125 14:51:33.658258 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cde15fa-ca05-443f-b71d-365e01384142" path="/var/lib/kubelet/pods/5cde15fa-ca05-443f-b71d-365e01384142/volumes" Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.354420 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" event={"ID":"dc3c22fd-7723-4f81-af93-2cf3a150cd08","Type":"ContainerStarted","Data":"3d6f5969e21c36bba7f2b94891e4cd1f3063118e9a14bb247b5fc957140227ea"} Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.357854 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerStarted","Data":"8b208ff4a68234006814d15aa7c752fd6ae2e91cfa74299335aaeedfc96da722"} Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.358057 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.358114 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="sg-core" containerID="cri-o://63f4b205ecd54c9bcf210196a87df986e2472d90896a51e546fd5c05d2574473" gracePeriod=30 Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.358069 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="proxy-httpd" 
containerID="cri-o://8b208ff4a68234006814d15aa7c752fd6ae2e91cfa74299335aaeedfc96da722" gracePeriod=30 Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.358025 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-central-agent" containerID="cri-o://84b6a2ca23202770c56d86c1035c27872867bfe1a3b79394726ddc3841987f01" gracePeriod=30 Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.358097 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-notification-agent" containerID="cri-o://43d6def0377bafb29b5c10eb1df77b870221fb7bd30f2eaad8a2c95a040138a3" gracePeriod=30 Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.382236 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" podStartSLOduration=3.108663821 podStartE2EDuration="44.382209561s" podCreationTimestamp="2025-11-25 14:50:52 +0000 UTC" firstStartedPulling="2025-11-25 14:50:53.839736414 +0000 UTC m=+1545.443149495" lastFinishedPulling="2025-11-25 14:51:35.113282164 +0000 UTC m=+1586.716695235" observedRunningTime="2025-11-25 14:51:36.379201071 +0000 UTC m=+1587.982614142" watchObservedRunningTime="2025-11-25 14:51:36.382209561 +0000 UTC m=+1587.985622642" Nov 25 14:51:36 crc kubenswrapper[4879]: I1125 14:51:36.408949 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.517713215 podStartE2EDuration="7.408930247s" podCreationTimestamp="2025-11-25 14:51:29 +0000 UTC" firstStartedPulling="2025-11-25 14:51:30.261236112 +0000 UTC m=+1581.864649183" lastFinishedPulling="2025-11-25 14:51:35.152453144 +0000 UTC m=+1586.755866215" observedRunningTime="2025-11-25 14:51:36.400936513 +0000 UTC m=+1588.004349584" watchObservedRunningTime="2025-11-25 14:51:36.408930247 +0000 UTC m=+1588.012343318" Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371805 4879 generic.go:334] "Generic (PLEG): container finished" podID="03c753e2-f42e-404c-9d65-e46cc370997d" containerID="8b208ff4a68234006814d15aa7c752fd6ae2e91cfa74299335aaeedfc96da722" exitCode=0 Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371841 4879 generic.go:334] "Generic (PLEG): container finished" podID="03c753e2-f42e-404c-9d65-e46cc370997d" containerID="63f4b205ecd54c9bcf210196a87df986e2472d90896a51e546fd5c05d2574473" exitCode=2 Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371848 4879 generic.go:334] "Generic (PLEG): container finished" podID="03c753e2-f42e-404c-9d65-e46cc370997d" containerID="43d6def0377bafb29b5c10eb1df77b870221fb7bd30f2eaad8a2c95a040138a3" exitCode=0 Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371878 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerDied","Data":"8b208ff4a68234006814d15aa7c752fd6ae2e91cfa74299335aaeedfc96da722"} Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371936 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerDied","Data":"63f4b205ecd54c9bcf210196a87df986e2472d90896a51e546fd5c05d2574473"} Nov 25 14:51:37 crc kubenswrapper[4879]: I1125 14:51:37.371951 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerDied","Data":"43d6def0377bafb29b5c10eb1df77b870221fb7bd30f2eaad8a2c95a040138a3"} Nov 25 14:51:40 crc kubenswrapper[4879]: I1125 14:51:40.120879 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:40 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:40 crc kubenswrapper[4879]: > Nov 25 14:51:42 crc kubenswrapper[4879]: I1125 14:51:42.034810 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:51:42 crc kubenswrapper[4879]: I1125 14:51:42.083734 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:51:42 crc kubenswrapper[4879]: I1125 14:51:42.286512 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"] Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.434396 4879 generic.go:334] "Generic (PLEG): container finished" podID="03c753e2-f42e-404c-9d65-e46cc370997d" containerID="84b6a2ca23202770c56d86c1035c27872867bfe1a3b79394726ddc3841987f01" exitCode=0 Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.434485 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerDied","Data":"84b6a2ca23202770c56d86c1035c27872867bfe1a3b79394726ddc3841987f01"} Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.434891 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03c753e2-f42e-404c-9d65-e46cc370997d","Type":"ContainerDied","Data":"480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10"} Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.434910 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="480baa3782dc970c4cfc1a82f7cf52f5b11bb406d6a7a53f4b7f387b5c428f10" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.435023 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n2454" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" containerID="cri-o://7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00" gracePeriod=2 Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.441286 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589562 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589622 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589660 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5d2m2\" (UniqueName: \"kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589707 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589748 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589772 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.589961 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd\") pod \"03c753e2-f42e-404c-9d65-e46cc370997d\" (UID: \"03c753e2-f42e-404c-9d65-e46cc370997d\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.590339 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.590452 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.590746 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.595838 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts" (OuterVolumeSpecName: "scripts") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.596604 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2" (OuterVolumeSpecName: "kube-api-access-5d2m2") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). InnerVolumeSpecName "kube-api-access-5d2m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.625820 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.685286 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.692063 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03c753e2-f42e-404c-9d65-e46cc370997d-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.692096 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.692109 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5d2m2\" (UniqueName: \"kubernetes.io/projected/03c753e2-f42e-404c-9d65-e46cc370997d-kube-api-access-5d2m2\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.692134 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.692145 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.703428 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data" (OuterVolumeSpecName: "config-data") pod "03c753e2-f42e-404c-9d65-e46cc370997d" (UID: "03c753e2-f42e-404c-9d65-e46cc370997d"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.794560 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03c753e2-f42e-404c-9d65-e46cc370997d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.889636 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.999402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities\") pod \"bb15aafc-57e7-4069-be26-b70743c103e3\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.999486 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content\") pod \"bb15aafc-57e7-4069-be26-b70743c103e3\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " Nov 25 14:51:43 crc kubenswrapper[4879]: I1125 14:51:43.999546 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb4fc\" (UniqueName: \"kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc\") pod \"bb15aafc-57e7-4069-be26-b70743c103e3\" (UID: \"bb15aafc-57e7-4069-be26-b70743c103e3\") " Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.000925 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities" (OuterVolumeSpecName: "utilities") pod "bb15aafc-57e7-4069-be26-b70743c103e3" (UID: "bb15aafc-57e7-4069-be26-b70743c103e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.037410 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.049375 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc" (OuterVolumeSpecName: "kube-api-access-vb4fc") pod "bb15aafc-57e7-4069-be26-b70743c103e3" (UID: "bb15aafc-57e7-4069-be26-b70743c103e3"). InnerVolumeSpecName "kube-api-access-vb4fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.096514 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb15aafc-57e7-4069-be26-b70743c103e3" (UID: "bb15aafc-57e7-4069-be26-b70743c103e3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.142056 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb4fc\" (UniqueName: \"kubernetes.io/projected/bb15aafc-57e7-4069-be26-b70743c103e3-kube-api-access-vb4fc\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.142097 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb15aafc-57e7-4069-be26-b70743c103e3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.445512 4879 generic.go:334] "Generic (PLEG): container finished" podID="bb15aafc-57e7-4069-be26-b70743c103e3" containerID="7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00" exitCode=0 Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.446809 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n2454" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.449192 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerDied","Data":"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00"} Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.449243 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n2454" event={"ID":"bb15aafc-57e7-4069-be26-b70743c103e3","Type":"ContainerDied","Data":"a95b0ea63752169fed893aea3ba92459e9dd4505cce99e63a0257b5d3f6c8da7"} Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.449266 4879 scope.go:117] "RemoveContainer" containerID="7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.449392 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.485116 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.497502 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.500945 4879 scope.go:117] "RemoveContainer" containerID="d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.520817 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"] Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.544402 4879 scope.go:117] "RemoveContainer" containerID="151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547078 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547642 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-api" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547668 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-api" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547682 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="proxy-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547691 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="proxy-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547702 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547711 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547727 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-notification-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547734 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-notification-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547766 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-central-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547774 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-central-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547792 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="extract-utilities" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547800 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="extract-utilities" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547816 4879 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="sg-core" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547824 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="sg-core" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547835 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="extract-content" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547842 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="extract-content" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.547854 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.547861 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548088 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548105 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="proxy-httpd" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548140 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="sg-core" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548160 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-notification-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548187 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cde15fa-ca05-443f-b71d-365e01384142" containerName="neutron-api" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548201 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" containerName="ceilometer-central-agent" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.548215 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" containerName="registry-server" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.550324 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.552846 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.553103 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554029 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554168 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554243 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554382 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554411 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.554442 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtmm5\" (UniqueName: \"kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.563037 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n2454"] Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.575079 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.593137 4879 scope.go:117] "RemoveContainer" containerID="7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.593621 
4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00\": container with ID starting with 7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00 not found: ID does not exist" containerID="7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.593644 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00"} err="failed to get container status \"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00\": rpc error: code = NotFound desc = could not find container \"7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00\": container with ID starting with 7f8b15cc476742177e072f1381e6cf23e339e3e8f624084098770c275948cc00 not found: ID does not exist" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.593664 4879 scope.go:117] "RemoveContainer" containerID="d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.594020 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d\": container with ID starting with d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d not found: ID does not exist" containerID="d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.594040 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d"} err="failed to get container status \"d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d\": rpc error: code = NotFound desc = could not find container \"d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d\": container with ID starting with d595caa1c0c1ce6d0b7f0755e88b9e4a589b9e106b27d3419bc4a5728ae2f50d not found: ID does not exist" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.594052 4879 scope.go:117] "RemoveContainer" containerID="151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3" Nov 25 14:51:44 crc kubenswrapper[4879]: E1125 14:51:44.594324 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3\": container with ID starting with 151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3 not found: ID does not exist" containerID="151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.594347 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3"} err="failed to get container status \"151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3\": rpc error: code = NotFound desc = could not find container \"151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3\": container with ID starting with 151f267821d6bdff3f1bacb03660f7b5b42a93a715a02d9b7cac8584c71961f3 not found: ID does not exist" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.656103 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.656337 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.656839 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.656913 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.657033 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.657057 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.657082 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtmm5\" (UniqueName: \"kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.657333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.657647 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.661424 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.662098 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.662986 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.663108 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.679770 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtmm5\" (UniqueName: \"kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5\") pod \"ceilometer-0\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " pod="openstack/ceilometer-0" Nov 25 14:51:44 crc kubenswrapper[4879]: I1125 14:51:44.935020 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:45 crc kubenswrapper[4879]: I1125 14:51:45.409019 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:45 crc kubenswrapper[4879]: I1125 14:51:45.455473 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerStarted","Data":"b47704ea5236133ec31b8d92ea2a07fbaaa6dbba7ce7d8830034f6430be4c68a"} Nov 25 14:51:45 crc kubenswrapper[4879]: I1125 14:51:45.494954 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:45 crc kubenswrapper[4879]: I1125 14:51:45.654704 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c753e2-f42e-404c-9d65-e46cc370997d" path="/var/lib/kubelet/pods/03c753e2-f42e-404c-9d65-e46cc370997d/volumes" Nov 25 14:51:45 crc kubenswrapper[4879]: I1125 14:51:45.655709 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb15aafc-57e7-4069-be26-b70743c103e3" path="/var/lib/kubelet/pods/bb15aafc-57e7-4069-be26-b70743c103e3/volumes" Nov 25 14:51:47 crc kubenswrapper[4879]: I1125 14:51:47.409324 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:51:47 crc kubenswrapper[4879]: I1125 14:51:47.409946 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:51:47 crc kubenswrapper[4879]: I1125 14:51:47.478813 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerStarted","Data":"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25"} Nov 25 14:51:47 crc kubenswrapper[4879]: I1125 14:51:47.478956 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerStarted","Data":"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7"} Nov 25 14:51:48 crc kubenswrapper[4879]: I1125 14:51:48.490627 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerStarted","Data":"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809"} Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.120196 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" probeResult="failure" output=< Nov 25 14:51:50 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 14:51:50 crc kubenswrapper[4879]: > Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.525430 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerStarted","Data":"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994"} Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.526760 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-central-agent" containerID="cri-o://08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7" gracePeriod=30 Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.527096 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.527964 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="proxy-httpd" containerID="cri-o://0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994" gracePeriod=30 Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.528143 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-notification-agent" containerID="cri-o://d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25" gracePeriod=30 Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.528325 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="sg-core" containerID="cri-o://c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809" gracePeriod=30 Nov 25 14:51:50 crc kubenswrapper[4879]: I1125 14:51:50.555101 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.252432688 podStartE2EDuration="6.555080896s" podCreationTimestamp="2025-11-25 14:51:44 +0000 UTC" firstStartedPulling="2025-11-25 14:51:45.416874055 +0000 UTC m=+1597.020287116" lastFinishedPulling="2025-11-25 14:51:49.719522253 +0000 UTC m=+1601.322935324" observedRunningTime="2025-11-25 14:51:50.548413637 +0000 UTC m=+1602.151826708" 
watchObservedRunningTime="2025-11-25 14:51:50.555080896 +0000 UTC m=+1602.158493967" Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536480 4879 generic.go:334] "Generic (PLEG): container finished" podID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerID="0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994" exitCode=0 Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536914 4879 generic.go:334] "Generic (PLEG): container finished" podID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerID="c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809" exitCode=2 Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536941 4879 generic.go:334] "Generic (PLEG): container finished" podID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerID="d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25" exitCode=0 Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536645 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerDied","Data":"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994"} Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536976 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerDied","Data":"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809"} Nov 25 14:51:51 crc kubenswrapper[4879]: I1125 14:51:51.536991 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerDied","Data":"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25"} Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.466142 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564487 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564547 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564590 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564631 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564681 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.564866 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.565046 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtmm5\" (UniqueName: \"kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5\") pod \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\" (UID: \"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1\") " Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.565178 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.565369 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.565940 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.566018 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.570887 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts" (OuterVolumeSpecName: "scripts") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.580523 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5" (OuterVolumeSpecName: "kube-api-access-wtmm5") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "kube-api-access-wtmm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.581478 4879 generic.go:334] "Generic (PLEG): container finished" podID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerID="08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7" exitCode=0 Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.581520 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerDied","Data":"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7"} Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.581551 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1","Type":"ContainerDied","Data":"b47704ea5236133ec31b8d92ea2a07fbaaa6dbba7ce7d8830034f6430be4c68a"} Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.581742 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.582327 4879 scope.go:117] "RemoveContainer" containerID="0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.602164 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.646649 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.667564 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtmm5\" (UniqueName: \"kubernetes.io/projected/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-kube-api-access-wtmm5\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.667908 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.667992 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.668075 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.688175 4879 scope.go:117] "RemoveContainer" containerID="c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.705004 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data" (OuterVolumeSpecName: "config-data") pod "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" (UID: "eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.710308 4879 scope.go:117] "RemoveContainer" containerID="d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.735755 4879 scope.go:117] "RemoveContainer" containerID="08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.755379 4879 scope.go:117] "RemoveContainer" containerID="0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.755729 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994\": container with ID starting with 0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994 not found: ID does not exist" containerID="0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.755775 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994"} err="failed to get container status \"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994\": rpc error: code = NotFound desc = could not find container \"0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994\": container with ID starting with 0915889d2541c565a994aba94f6916af70eb8d7a1d01e3f58152a156e31fd994 not found: ID does not exist" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.755795 4879 scope.go:117] "RemoveContainer" containerID="c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809" Nov 25 14:51:54 crc kubenswrapper[4879]: 
E1125 14:51:54.756023 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809\": container with ID starting with c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809 not found: ID does not exist" containerID="c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.756157 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809"} err="failed to get container status \"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809\": rpc error: code = NotFound desc = could not find container \"c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809\": container with ID starting with c86a23dd50191b371e4b68ce2d6349fc3e127852a7bb801e8fff10f68d852809 not found: ID does not exist" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.756236 4879 scope.go:117] "RemoveContainer" containerID="d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.756815 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25\": container with ID starting with d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25 not found: ID does not exist" containerID="d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.756838 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25"} err="failed to get container status \"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25\": rpc error: code = NotFound desc = could not find container \"d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25\": container with ID starting with d95ac166a0524d5d1026f0722a73e0b967f89177a5ad0be619219482da177f25 not found: ID does not exist" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.756862 4879 scope.go:117] "RemoveContainer" containerID="08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.757260 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7\": container with ID starting with 08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7 not found: ID does not exist" containerID="08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.757371 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7"} err="failed to get container status \"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7\": rpc error: code = NotFound desc = could not find container \"08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7\": container with ID starting with 08ef8ea90a0c4f1a09271569d6d5f66b97b3b350ff8fdea59dc11bbb982c61e7 not found: ID does not exist" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.770171 
4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.925418 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.935064 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975088 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.975495 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="sg-core" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975514 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="sg-core" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.975533 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="proxy-httpd" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975538 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="proxy-httpd" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.975554 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-central-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975561 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-central-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: E1125 14:51:54.975571 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-notification-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975578 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-notification-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975809 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="proxy-httpd" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975829 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-notification-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975848 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="sg-core" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.975866 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" containerName="ceilometer-central-agent" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.977595 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.980364 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:51:54 crc kubenswrapper[4879]: I1125 14:51:54.980845 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.078758 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.183914 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5s69\" (UniqueName: \"kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.183994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.184035 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.184297 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.184468 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.184600 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.184774 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286280 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 
14:51:55.286354 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286404 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286567 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286616 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286673 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.286723 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5s69\" (UniqueName: \"kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.287187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.287187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.291433 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.293225 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.293829 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.294492 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.303458 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5s69\" (UniqueName: \"kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69\") pod \"ceilometer-0\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.597700 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:51:55 crc kubenswrapper[4879]: I1125 14:51:55.657823 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1" path="/var/lib/kubelet/pods/eec3af0a-bc9e-4de3-b37c-2f5adc4ec7c1/volumes" Nov 25 14:51:56 crc kubenswrapper[4879]: I1125 14:51:56.059779 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:51:56 crc kubenswrapper[4879]: I1125 14:51:56.602191 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerStarted","Data":"c3e448cbd2a3ea7e53bc445493f42f6bf1e17c2ed7e97b9d4ed9e17be4c31f98"} Nov 25 14:51:57 crc kubenswrapper[4879]: I1125 14:51:57.614744 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerStarted","Data":"a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5"} Nov 25 14:51:58 crc kubenswrapper[4879]: I1125 14:51:58.627474 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerStarted","Data":"5d9f8f38b13f1bf688276aaa06f843d0971b138454b14935ac8abfccf5af3f42"} Nov 25 14:51:59 crc kubenswrapper[4879]: I1125 14:51:59.124758 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cws7f" Nov 25 14:51:59 crc kubenswrapper[4879]: I1125 14:51:59.194985 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cws7f" Nov 25 14:51:59 crc kubenswrapper[4879]: I1125 14:51:59.639515 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerStarted","Data":"aea4ab921de625d2f2a93b9ba0a16b733f6e9d8774bbf98822ba8a9f57472f6b"} Nov 25 14:51:59 crc kubenswrapper[4879]: I1125 14:51:59.974077 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cws7f"] Nov 25 14:52:00 crc kubenswrapper[4879]: I1125 14:52:00.649397 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cws7f" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" 
containerID="cri-o://9343436781b61bd3ebd9153984239f2fc0376be9027802ac40c4803a93fa4db4" gracePeriod=2 Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.666057 4879 generic.go:334] "Generic (PLEG): container finished" podID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerID="9343436781b61bd3ebd9153984239f2fc0376be9027802ac40c4803a93fa4db4" exitCode=0 Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.666167 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerDied","Data":"9343436781b61bd3ebd9153984239f2fc0376be9027802ac40c4803a93fa4db4"} Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.666485 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cws7f" event={"ID":"78bf95c7-c5cc-4ea8-80de-f76070054d12","Type":"ContainerDied","Data":"eb6fa7c645d3a05c1cf1893b5be33425c236e85cd844d3e70a613d932caf3418"} Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.666507 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb6fa7c645d3a05c1cf1893b5be33425c236e85cd844d3e70a613d932caf3418" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.671084 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerStarted","Data":"4a17aaaaaf436334d2e852901462c2d7dc9e508b8bf0cfa92d0694494a823150"} Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.671475 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.692243 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.486761061 podStartE2EDuration="7.692227735s" podCreationTimestamp="2025-11-25 14:51:54 +0000 UTC" firstStartedPulling="2025-11-25 14:51:56.061734951 +0000 UTC m=+1607.665148022" lastFinishedPulling="2025-11-25 14:52:01.267201485 +0000 UTC m=+1612.870614696" observedRunningTime="2025-11-25 14:52:01.690511059 +0000 UTC m=+1613.293924130" watchObservedRunningTime="2025-11-25 14:52:01.692227735 +0000 UTC m=+1613.295640806" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.695912 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cws7f" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.816783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbvmj\" (UniqueName: \"kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj\") pod \"78bf95c7-c5cc-4ea8-80de-f76070054d12\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.817732 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content\") pod \"78bf95c7-c5cc-4ea8-80de-f76070054d12\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.818180 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities\") pod \"78bf95c7-c5cc-4ea8-80de-f76070054d12\" (UID: \"78bf95c7-c5cc-4ea8-80de-f76070054d12\") " Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.819068 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities" (OuterVolumeSpecName: "utilities") pod "78bf95c7-c5cc-4ea8-80de-f76070054d12" (UID: "78bf95c7-c5cc-4ea8-80de-f76070054d12"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.821971 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj" (OuterVolumeSpecName: "kube-api-access-kbvmj") pod "78bf95c7-c5cc-4ea8-80de-f76070054d12" (UID: "78bf95c7-c5cc-4ea8-80de-f76070054d12"). InnerVolumeSpecName "kube-api-access-kbvmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.866942 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78bf95c7-c5cc-4ea8-80de-f76070054d12" (UID: "78bf95c7-c5cc-4ea8-80de-f76070054d12"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.919730 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.919763 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbvmj\" (UniqueName: \"kubernetes.io/projected/78bf95c7-c5cc-4ea8-80de-f76070054d12-kube-api-access-kbvmj\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:01 crc kubenswrapper[4879]: I1125 14:52:01.919773 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78bf95c7-c5cc-4ea8-80de-f76070054d12-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:02 crc kubenswrapper[4879]: I1125 14:52:02.680355 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cws7f"
Nov 25 14:52:02 crc kubenswrapper[4879]: I1125 14:52:02.740081 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cws7f"]
Nov 25 14:52:02 crc kubenswrapper[4879]: I1125 14:52:02.750088 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cws7f"]
Nov 25 14:52:03 crc kubenswrapper[4879]: I1125 14:52:03.661308 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" path="/var/lib/kubelet/pods/78bf95c7-c5cc-4ea8-80de-f76070054d12/volumes"
Nov 25 14:52:17 crc kubenswrapper[4879]: I1125 14:52:17.409218 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 14:52:17 crc kubenswrapper[4879]: I1125 14:52:17.410053 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 14:52:18 crc kubenswrapper[4879]: I1125 14:52:18.842057 4879 generic.go:334] "Generic (PLEG): container finished" podID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" containerID="3d6f5969e21c36bba7f2b94891e4cd1f3063118e9a14bb247b5fc957140227ea" exitCode=0
Nov 25 14:52:18 crc kubenswrapper[4879]: I1125 14:52:18.842101 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" event={"ID":"dc3c22fd-7723-4f81-af93-2cf3a150cd08","Type":"ContainerDied","Data":"3d6f5969e21c36bba7f2b94891e4cd1f3063118e9a14bb247b5fc957140227ea"}
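The two prober entries above record a failed HTTP liveness check: the kubelet issues a GET against the configured endpoint and counts any transport error, such as the connection refused here, as a failure. A minimal sketch of that check against the same endpoint; the 1-second timeout is an assumption, not the pod's configured probe timeout:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check of the kind logged above: GET the
// endpoint and treat a transport error or an error-class status as failure
// (kubelet's HTTP probes accept 2xx/3xx responses as success).
func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Liveness probe status=failure:", err)
	}
}
```

A single failure like this one does not restart the container; the kubelet only kills the container after failureThreshold consecutive failures, which is why no corresponding "Killing container" entry follows here.

Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.212559 4879 util.go:48] "No ready sandbox for pod can be found. 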
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.381492 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle\") pod \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.381623 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66g2w\" (UniqueName: \"kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w\") pod \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.381714 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts\") pod \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.381785 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data\") pod \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\" (UID: \"dc3c22fd-7723-4f81-af93-2cf3a150cd08\") " Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.390580 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w" (OuterVolumeSpecName: "kube-api-access-66g2w") pod "dc3c22fd-7723-4f81-af93-2cf3a150cd08" (UID: "dc3c22fd-7723-4f81-af93-2cf3a150cd08"). InnerVolumeSpecName "kube-api-access-66g2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.393358 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts" (OuterVolumeSpecName: "scripts") pod "dc3c22fd-7723-4f81-af93-2cf3a150cd08" (UID: "dc3c22fd-7723-4f81-af93-2cf3a150cd08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.418869 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc3c22fd-7723-4f81-af93-2cf3a150cd08" (UID: "dc3c22fd-7723-4f81-af93-2cf3a150cd08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.424378 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data" (OuterVolumeSpecName: "config-data") pod "dc3c22fd-7723-4f81-af93-2cf3a150cd08" (UID: "dc3c22fd-7723-4f81-af93-2cf3a150cd08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.483499 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.483535 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.483550 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66g2w\" (UniqueName: \"kubernetes.io/projected/dc3c22fd-7723-4f81-af93-2cf3a150cd08-kube-api-access-66g2w\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.483563 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc3c22fd-7723-4f81-af93-2cf3a150cd08-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.881260 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" event={"ID":"dc3c22fd-7723-4f81-af93-2cf3a150cd08","Type":"ContainerDied","Data":"0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b"} Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.881310 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a2350d0bd8a064f60a957cc924c67c5745fb1c12d380407e406139a368da23b" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.881311 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-h2tdk" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962066 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 14:52:20 crc kubenswrapper[4879]: E1125 14:52:20.962476 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="extract-content" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962491 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="extract-content" Nov 25 14:52:20 crc kubenswrapper[4879]: E1125 14:52:20.962512 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" containerName="nova-cell0-conductor-db-sync" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962519 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" containerName="nova-cell0-conductor-db-sync" Nov 25 14:52:20 crc kubenswrapper[4879]: E1125 14:52:20.962536 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="extract-utilities" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962542 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="extract-utilities" Nov 25 14:52:20 crc kubenswrapper[4879]: E1125 14:52:20.962553 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962559 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962764 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="78bf95c7-c5cc-4ea8-80de-f76070054d12" containerName="registry-server" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.962797 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" containerName="nova-cell0-conductor-db-sync" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.963479 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.972106 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.972275 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-fdslq" Nov 25 14:52:20 crc kubenswrapper[4879]: I1125 14:52:20.979922 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.096548 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkmmd\" (UniqueName: \"kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.096618 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.096660 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.198705 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkmmd\" (UniqueName: \"kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.198797 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.198884 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0" Nov 25 14:52:21 crc 
kubenswrapper[4879]: I1125 14:52:21.205300 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.210915 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.223404 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkmmd\" (UniqueName: \"kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd\") pod \"nova-cell0-conductor-0\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.281947 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.718551 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 25 14:52:21 crc kubenswrapper[4879]: W1125 14:52:21.722437 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcfac5ba_5544_4d76_af22_0c8b6b9028a7.slice/crio-0cc2c5bcd6a1005db1627d69760ce7fdbcbfda7164518d2bc0452fefda4d93db WatchSource:0}: Error finding container 0cc2c5bcd6a1005db1627d69760ce7fdbcbfda7164518d2bc0452fefda4d93db: Status 404 returned error can't find the container with id 0cc2c5bcd6a1005db1627d69760ce7fdbcbfda7164518d2bc0452fefda4d93db
Nov 25 14:52:21 crc kubenswrapper[4879]: I1125 14:52:21.891062 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fcfac5ba-5544-4d76-af22-0c8b6b9028a7","Type":"ContainerStarted","Data":"0cc2c5bcd6a1005db1627d69760ce7fdbcbfda7164518d2bc0452fefda4d93db"}
Nov 25 14:52:22 crc kubenswrapper[4879]: I1125 14:52:22.903008 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fcfac5ba-5544-4d76-af22-0c8b6b9028a7","Type":"ContainerStarted","Data":"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2"}
Nov 25 14:52:22 crc kubenswrapper[4879]: I1125 14:52:22.904175 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:22 crc kubenswrapper[4879]: I1125 14:52:22.927704 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.927682593 podStartE2EDuration="2.927682593s" podCreationTimestamp="2025-11-25 14:52:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:22.922869714 +0000 UTC m=+1634.526282785" watchObservedRunningTime="2025-11-25 14:52:22.927682593 +0000 UTC m=+1634.531095674"
Nov 25 14:52:25 crc kubenswrapper[4879]: I1125 14:52:25.603199 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
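In the startup-latency entry above, no image pull happened (both pull timestamps are the zero time "0001-01-01"), so podStartSLOduration equals podStartE2EDuration. A sketch reproducing the arithmetic from the logged timestamps; the layout matches Go's default time formatting, and while the log does not show which internal timestamp the tracker subtracts, the logged value numerically equals watchObservedRunningTime minus podCreationTimestamp:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout of Go's default time.Time string form, as printed in the entry above.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, _ := time.Parse(layout, "2025-11-25 14:52:20 +0000 UTC")
	// watchObservedRunningTime, with the monotonic "m=+..." suffix dropped.
	running, _ := time.Parse(layout, "2025-11-25 14:52:22.927682593 +0000 UTC")
	fmt.Println(running.Sub(created)) // 2.927682593s, matching podStartE2EDuration
}
```

Contrast this with the ceilometer-0 entry earlier (14:52:01.692243), where firstStartedPulling and lastFinishedPulling are real timestamps and the ~5.2s spent pulling is excluded from podStartSLOduration (2.49s) but included in podStartE2EDuration (7.69s).

Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 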
14:52:29.270868 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.271638 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" containerID="cri-o://ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c" gracePeriod=30
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.763708 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.895546 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnv9c\" (UniqueName: \"kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c\") pod \"821c14ef-2ea4-488d-84f9-2234a6e27447\" (UID: \"821c14ef-2ea4-488d-84f9-2234a6e27447\") "
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.901927 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c" (OuterVolumeSpecName: "kube-api-access-tnv9c") pod "821c14ef-2ea4-488d-84f9-2234a6e27447" (UID: "821c14ef-2ea4-488d-84f9-2234a6e27447"). InnerVolumeSpecName "kube-api-access-tnv9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.969554 4879 generic.go:334] "Generic (PLEG): container finished" podID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerID="ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c" exitCode=2
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.969608 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerDied","Data":"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c"}
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.969641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"821c14ef-2ea4-488d-84f9-2234a6e27447","Type":"ContainerDied","Data":"c73f12ccd9ac7776c7a99ae48420cf07b60b7231ba61e30eed05d67cb274e60a"}
Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.969663 4879 scope.go:117] "RemoveContainer" containerID="ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c"
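The kill entry above shows the kubelet asking CRI-O to stop kube-state-metrics with gracePeriod=30; the container exits (code 2) well inside the window. Conceptually the runtime sends SIGTERM, waits out the grace period, then escalates to SIGKILL. An illustrative sketch of that pattern against a plain child process; this is neither kubelet nor CRI-O code, which drive the same sequence through the CRI StopContainer RPC:

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to the grace period for the process
// to exit, and escalates to SIGKILL if it has not.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL once the grace period elapses
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	_ = cmd.Start()
	stopWithGrace(cmd, 30*time.Second) // gracePeriod=30, as logged above
}
```

Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.969659 4879 util.go:48] "No ready sandbox for pod can be found. 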
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.992742 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:52:29 crc kubenswrapper[4879]: I1125 14:52:29.998646 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnv9c\" (UniqueName: \"kubernetes.io/projected/821c14ef-2ea4-488d-84f9-2234a6e27447-kube-api-access-tnv9c\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.018223 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.038208 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.039887 4879 scope.go:117] "RemoveContainer" containerID="ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c" Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.040406 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c\": container with ID starting with ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c not found: ID does not exist" containerID="ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.040445 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c"} err="failed to get container status \"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c\": rpc error: code = NotFound desc = could not find container \"ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c\": container with ID starting with ecb058a6bf1fccc28a9d49bb3ce8790e9e9f656982b7cbf209a78abdfc85b98c not found: ID does not exist" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.040474 4879 scope.go:117] "RemoveContainer" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.040772 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07\": container with ID starting with 2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07 not found: ID does not exist" containerID="2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.040796 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07"} err="failed to get container status \"2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07\": rpc error: code = NotFound desc = could not find container \"2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07\": container with ID starting with 2525dd5ca193ff4a77975c5915a59ec6350f66fb5cf2c54b813794b467ee4d07 not found: ID does not exist" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050029 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.050480 4879 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050504 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.050526 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050537 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.050558 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050564 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: E1125 14:52:30.050576 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050582 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050760 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050779 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.050790 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.051541 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.055457 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.056149 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.058692 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.202481 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.202528 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57shw\" (UniqueName: \"kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.202567 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.202770 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.304256 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.304324 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57shw\" (UniqueName: \"kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.304380 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.304506 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.308849 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.308932 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.309164 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.322088 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57shw\" (UniqueName: \"kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw\") pod \"kube-state-metrics-0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " pod="openstack/kube-state-metrics-0" Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.391338 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.841493 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 25 14:52:30 crc kubenswrapper[4879]: I1125 14:52:30.983619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0","Type":"ContainerStarted","Data":"2820b638963ed956cd317f870e010584e45dd246c1c30739a5626f3464ba4e1e"}
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.193930 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.194280 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-central-agent" containerID="cri-o://a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5" gracePeriod=30
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.194360 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="sg-core" containerID="cri-o://aea4ab921de625d2f2a93b9ba0a16b733f6e9d8774bbf98822ba8a9f57472f6b" gracePeriod=30
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.194381 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-notification-agent" containerID="cri-o://5d9f8f38b13f1bf688276aaa06f843d0971b138454b14935ac8abfccf5af3f42" gracePeriod=30
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.194416 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="proxy-httpd" containerID="cri-o://4a17aaaaaf436334d2e852901462c2d7dc9e508b8bf0cfa92d0694494a823150" gracePeriod=30
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.319567 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.656662 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" path="/var/lib/kubelet/pods/821c14ef-2ea4-488d-84f9-2234a6e27447/volumes"
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.826982 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-w29sx"]
Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.827736 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="821c14ef-2ea4-488d-84f9-2234a6e27447" containerName="kube-state-metrics"
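The four kill entries above identify each ceilometer-0 container as "<runtime>://<id>" (here cri-o://...). A small helper splitting that form, for example to hand the bare ID to runtime tooling such as crictl; this is illustrative, not the kubelet's parser:

```go
package main

import (
	"fmt"
	"strings"
)

// parseContainerID splits a "<runtime>://<id>" reference of the kind logged
// in the "Killing container with a grace period" entries above.
func parseContainerID(s string) (runtime, id string, err error) {
	runtime, id, ok := strings.Cut(s, "://")
	if !ok || runtime == "" || id == "" {
		return "", "", fmt.Errorf("malformed container ID %q", s)
	}
	return runtime, id, nil
}

func main() {
	rt, id, err := parseContainerID("cri-o://a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5")
	if err != nil {
		panic(err)
	}
	fmt.Println(rt, id[:12]) // cri-o a2b9ee454769
}
```

Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.828419 4879 util.go:30] "No sandbox for pod can be found. 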
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.831088 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.831372 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.837945 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w29sx"] Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.934043 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.934166 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.934199 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.934548 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wqdp\" (UniqueName: \"kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997366 4879 generic.go:334] "Generic (PLEG): container finished" podID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerID="4a17aaaaaf436334d2e852901462c2d7dc9e508b8bf0cfa92d0694494a823150" exitCode=0 Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997400 4879 generic.go:334] "Generic (PLEG): container finished" podID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerID="aea4ab921de625d2f2a93b9ba0a16b733f6e9d8774bbf98822ba8a9f57472f6b" exitCode=2 Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997407 4879 generic.go:334] "Generic (PLEG): container finished" podID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerID="a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5" exitCode=0 Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerDied","Data":"4a17aaaaaf436334d2e852901462c2d7dc9e508b8bf0cfa92d0694494a823150"} Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997532 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerDied","Data":"aea4ab921de625d2f2a93b9ba0a16b733f6e9d8774bbf98822ba8a9f57472f6b"} Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.997543 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerDied","Data":"a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5"} Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.999658 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0","Type":"ContainerStarted","Data":"d1174852e93b7c0b41154253368f50c3ee1e1d604b6b7130605e291d51af060e"} Nov 25 14:52:31 crc kubenswrapper[4879]: I1125 14:52:31.999815 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.018838 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.025027 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.028188 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.031451 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.036130 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wqdp\" (UniqueName: \"kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.036239 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.036283 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.036303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.048838 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.050926 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.065845 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.067713 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.079515 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.081875 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.094438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wqdp\" (UniqueName: \"kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp\") pod \"nova-cell0-cell-mapping-w29sx\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.114061 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.118927 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.7501290200000001 podStartE2EDuration="2.118907803s" podCreationTimestamp="2025-11-25 14:52:30 +0000 UTC" firstStartedPulling="2025-11-25 14:52:30.849182385 +0000 UTC m=+1642.452595456" lastFinishedPulling="2025-11-25 14:52:31.217961168 +0000 UTC m=+1642.821374239" observedRunningTime="2025-11-25 14:52:32.087405998 +0000 UTC m=+1643.690819069" watchObservedRunningTime="2025-11-25 14:52:32.118907803 +0000 UTC m=+1643.722320874" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138623 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138719 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138759 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138783 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6lkf\" (UniqueName: \"kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138810 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138839 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.138999 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkmlj\" (UniqueName: \"kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.139033 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.161055 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.184442 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.185687 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.187889 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.229957 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244078 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244174 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244217 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244248 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244267 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6lkf\" (UniqueName: \"kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244289 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244310 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244336 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4tx7\" (UniqueName: 
\"kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244467 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkmlj\" (UniqueName: \"kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244494 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.244971 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.245377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.274948 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.276092 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.276317 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.278086 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkmlj\" (UniqueName: \"kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj\") pod \"nova-metadata-0\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.286351 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6lkf\" (UniqueName: \"kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.288643 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.336991 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.343356 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.345142 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.353907 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.354061 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.354182 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4tx7\" (UniqueName: \"kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.361462 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.364249 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.376022 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4tx7\" (UniqueName: \"kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7\") pod \"nova-scheduler-0\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.454235 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460051 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzx27\" (UniqueName: \"kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460149 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460185 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460230 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460263 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.460316 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.465623 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.466963 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.468195 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.486696 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.496039 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.534820 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564272 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nf6c\" (UniqueName: \"kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564343 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564380 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564414 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564458 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564493 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564590 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzx27\" (UniqueName: \"kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564647 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.564677 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " 
pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.565800 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.566077 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.566789 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.566923 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.569866 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.588655 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzx27\" (UniqueName: \"kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27\") pod \"dnsmasq-dns-bccf8f775-77frl\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.666709 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nf6c\" (UniqueName: \"kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.666769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.666821 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.674772 4879 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.683594 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.690352 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.704217 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nf6c\" (UniqueName: \"kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c\") pod \"nova-cell1-novncproxy-0\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.870155 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w29sx"] Nov 25 14:52:32 crc kubenswrapper[4879]: I1125 14:52:32.983956 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.012595 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w29sx" event={"ID":"64f062b9-8008-483b-b61c-07c621b06e67","Type":"ContainerStarted","Data":"59ac9fe6a31c8e270357539b4c8f95a49b97f91f1643d79fc61110a758f4cdba"} Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.097812 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.123036 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:33 crc kubenswrapper[4879]: W1125 14:52:33.140912 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9dfe2d4_f7d5_4dc7_b734_c6136838325b.slice/crio-538d77671bdb3741165203077ab83dad1b52d01f11187430730b9785d9426b01 WatchSource:0}: Error finding container 538d77671bdb3741165203077ab83dad1b52d01f11187430730b9785d9426b01: Status 404 returned error can't find the container with id 538d77671bdb3741165203077ab83dad1b52d01f11187430730b9785d9426b01 Nov 25 14:52:33 crc kubenswrapper[4879]: W1125 14:52:33.155840 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e765f4b_ebe3_47c0_9a96_a0a5b8b7a36b.slice/crio-f86e7122d166362f7caeb96ea094d234b1f371afa2831ba40a46a570d90b355c WatchSource:0}: Error finding container f86e7122d166362f7caeb96ea094d234b1f371afa2831ba40a46a570d90b355c: Status 404 returned error can't find the container with id f86e7122d166362f7caeb96ea094d234b1f371afa2831ba40a46a570d90b355c Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.309151 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.333862 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-scheduler-0"] Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.390665 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qncf"] Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.411895 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.413525 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qncf"] Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.427030 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.427495 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.513847 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.513925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhn2v\" (UniqueName: \"kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.514275 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.514320 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.616487 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.617980 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.618261 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.618341 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhn2v\" (UniqueName: \"kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.624003 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.627577 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.627928 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.642809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhn2v\" (UniqueName: \"kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v\") pod \"nova-cell1-conductor-db-sync-6qncf\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.737190 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:33 crc kubenswrapper[4879]: I1125 14:52:33.748703 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.029193 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b4f5462a-a253-4d98-a059-b429cc926dff","Type":"ContainerStarted","Data":"3bd65040c84eff20d2fd7822238c4f11a61c7f617572a80c5316026379c706e9"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.032703 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w29sx" event={"ID":"64f062b9-8008-483b-b61c-07c621b06e67","Type":"ContainerStarted","Data":"3026ef6362da67dd8812a17397d7367d13aad659ca6534125a24e47f36e7fe36"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.036147 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerStarted","Data":"538d77671bdb3741165203077ab83dad1b52d01f11187430730b9785d9426b01"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.038858 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerStarted","Data":"f86e7122d166362f7caeb96ea094d234b1f371afa2831ba40a46a570d90b355c"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.041503 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9df1834-6236-443d-8662-4da48203efac","Type":"ContainerStarted","Data":"4d8ad6e1dbe9f0dac069f7feb9fdd67a3936b18ff80981636645cc8fbab1b9bf"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.044207 4879 generic.go:334] "Generic (PLEG): container finished" podID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerID="0a434fb6a19b168824100b0429593a0f740fef032e6170bd52459caab3ec34fe" exitCode=0 Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.044251 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-77frl" event={"ID":"2fcb28ab-a9d4-43df-9beb-b6658e91e39c","Type":"ContainerDied","Data":"0a434fb6a19b168824100b0429593a0f740fef032e6170bd52459caab3ec34fe"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.045314 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-77frl" event={"ID":"2fcb28ab-a9d4-43df-9beb-b6658e91e39c","Type":"ContainerStarted","Data":"0a8354e52d5d58e1f91fdbc26af586129c3f3bc66d633372e3a6261865009b9f"} Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.063681 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-w29sx" podStartSLOduration=3.063659101 podStartE2EDuration="3.063659101s" podCreationTimestamp="2025-11-25 14:52:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:34.0490808 +0000 UTC m=+1645.652493871" watchObservedRunningTime="2025-11-25 14:52:34.063659101 +0000 UTC m=+1645.667072172" Nov 25 14:52:34 crc kubenswrapper[4879]: I1125 14:52:34.384523 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qncf"] Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.071411 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qncf" 
event={"ID":"20c70c15-43aa-44ee-8a6c-ae31b460ac5e","Type":"ContainerStarted","Data":"34f47d04d59bb556c83ac90c7fd44769f03020a5c2534c1b206563b49dfba9f1"} Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.071765 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qncf" event={"ID":"20c70c15-43aa-44ee-8a6c-ae31b460ac5e","Type":"ContainerStarted","Data":"630a493a4855123d3e85d3f2ca57d8457f5b31bb588e230df2e474a57b69e0f1"} Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.080149 4879 generic.go:334] "Generic (PLEG): container finished" podID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerID="5d9f8f38b13f1bf688276aaa06f843d0971b138454b14935ac8abfccf5af3f42" exitCode=0 Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.080218 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerDied","Data":"5d9f8f38b13f1bf688276aaa06f843d0971b138454b14935ac8abfccf5af3f42"} Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.088579 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-77frl" event={"ID":"2fcb28ab-a9d4-43df-9beb-b6658e91e39c","Type":"ContainerStarted","Data":"14723245bbfb3df2adc0ca36d6f3662562c586c27a3bcdaf7d7283f32dac0cc8"} Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.088666 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.099104 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6qncf" podStartSLOduration=2.099070099 podStartE2EDuration="2.099070099s" podCreationTimestamp="2025-11-25 14:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:35.094882747 +0000 UTC m=+1646.698295838" watchObservedRunningTime="2025-11-25 14:52:35.099070099 +0000 UTC m=+1646.702483170" Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.124825 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-77frl" podStartSLOduration=3.124791788 podStartE2EDuration="3.124791788s" podCreationTimestamp="2025-11-25 14:52:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:35.116165447 +0000 UTC m=+1646.719578518" watchObservedRunningTime="2025-11-25 14:52:35.124791788 +0000 UTC m=+1646.728204859" Nov 25 14:52:35 crc kubenswrapper[4879]: I1125 14:52:35.334045 4879 scope.go:117] "RemoveContainer" containerID="c0a7c3847a43fe9b918c2eab99e478939ca48e5538407ab69dfa631a6c8584a3" Nov 25 14:52:36 crc kubenswrapper[4879]: I1125 14:52:36.197402 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:36 crc kubenswrapper[4879]: I1125 14:52:36.216859 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.494885 4879 scope.go:117] "RemoveContainer" containerID="41c2542e53d215903d6489c638cfa4d00898988a7f57f64c5234d8ed55e5482a" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.690452 4879 scope.go:117] "RemoveContainer" containerID="fc769144da28d1f800fcf592ed4bddb3bf523a034439a16fc93e6ad5fd5a7fb5" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 
14:52:37.793513 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.827762 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828181 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828274 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5s69\" (UniqueName: \"kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828309 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828339 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828416 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.828449 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd\") pod \"34855c13-ac2b-491b-a376-8d1c3be6344f\" (UID: \"34855c13-ac2b-491b-a376-8d1c3be6344f\") " Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.829765 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.830140 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.830944 4879 scope.go:117] "RemoveContainer" containerID="77b734dac422c939eb4feed9ab24da67b2dcb6856f1c0be62ce813ed6db75a14" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.870191 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts" (OuterVolumeSpecName: "scripts") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.870630 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69" (OuterVolumeSpecName: "kube-api-access-r5s69") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "kube-api-access-r5s69". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.931587 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5s69\" (UniqueName: \"kubernetes.io/projected/34855c13-ac2b-491b-a376-8d1c3be6344f-kube-api-access-r5s69\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.931645 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.931658 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:37 crc kubenswrapper[4879]: I1125 14:52:37.931669 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/34855c13-ac2b-491b-a376-8d1c3be6344f-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.011807 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.035088 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.069150 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.094853 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data" (OuterVolumeSpecName: "config-data") pod "34855c13-ac2b-491b-a376-8d1c3be6344f" (UID: "34855c13-ac2b-491b-a376-8d1c3be6344f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.123965 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerStarted","Data":"0c341d493c2ea79312086f53b3151a095683fbeccc1f0a0e85bd205019c59b7e"} Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.128801 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9df1834-6236-443d-8662-4da48203efac","Type":"ContainerStarted","Data":"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb"} Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.128892 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e9df1834-6236-443d-8662-4da48203efac" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb" gracePeriod=30 Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.136574 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.136600 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34855c13-ac2b-491b-a376-8d1c3be6344f-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.137086 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.137179 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"34855c13-ac2b-491b-a376-8d1c3be6344f","Type":"ContainerDied","Data":"c3e448cbd2a3ea7e53bc445493f42f6bf1e17c2ed7e97b9d4ed9e17be4c31f98"} Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.137229 4879 scope.go:117] "RemoveContainer" containerID="4a17aaaaaf436334d2e852901462c2d7dc9e508b8bf0cfa92d0694494a823150" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.153301 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerStarted","Data":"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6"} Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.158194 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.429673049 podStartE2EDuration="6.158179661s" podCreationTimestamp="2025-11-25 14:52:32 +0000 UTC" firstStartedPulling="2025-11-25 14:52:33.828863228 +0000 UTC m=+1645.432276299" lastFinishedPulling="2025-11-25 14:52:37.55736984 +0000 UTC m=+1649.160782911" observedRunningTime="2025-11-25 14:52:38.153519627 +0000 UTC m=+1649.756932698" watchObservedRunningTime="2025-11-25 14:52:38.158179661 +0000 UTC m=+1649.761592732" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.160908 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b4f5462a-a253-4d98-a059-b429cc926dff","Type":"ContainerStarted","Data":"7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9"} Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.182260 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.00540879 podStartE2EDuration="6.182239586s" podCreationTimestamp="2025-11-25 14:52:32 +0000 UTC" firstStartedPulling="2025-11-25 14:52:33.377314497 +0000 UTC m=+1644.980727568" lastFinishedPulling="2025-11-25 14:52:37.554145293 +0000 UTC m=+1649.157558364" observedRunningTime="2025-11-25 14:52:38.17601344 +0000 UTC m=+1649.779426521" watchObservedRunningTime="2025-11-25 14:52:38.182239586 +0000 UTC m=+1649.785652657" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.189675 4879 scope.go:117] "RemoveContainer" containerID="aea4ab921de625d2f2a93b9ba0a16b733f6e9d8774bbf98822ba8a9f57472f6b" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.218266 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.226081 4879 scope.go:117] "RemoveContainer" containerID="5d9f8f38b13f1bf688276aaa06f843d0971b138454b14935ac8abfccf5af3f42" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.230685 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.277439 4879 scope.go:117] "RemoveContainer" containerID="a2b9ee4547691ec96e6670750b2cc32a19928ecd5cdbcd17afe8a1ee739c36a5" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.282664 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:52:38 crc kubenswrapper[4879]: E1125 14:52:38.283246 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="proxy-httpd" Nov 25 
14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283262 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="proxy-httpd" Nov 25 14:52:38 crc kubenswrapper[4879]: E1125 14:52:38.283283 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-notification-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283292 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-notification-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: E1125 14:52:38.283310 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-central-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283317 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-central-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: E1125 14:52:38.283336 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="sg-core" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283343 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="sg-core" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283570 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-central-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283598 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="sg-core" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283621 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="proxy-httpd" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.283635 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" containerName="ceilometer-notification-agent" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.286081 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.289549 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.290151 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.290737 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.294146 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340428 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340553 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340598 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340813 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340894 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsbws\" (UniqueName: \"kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.340921 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.341000 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.442915 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.442966 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443018 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443060 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443147 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443185 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsbws\" (UniqueName: \"kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443211 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443254 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443587 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.443923 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.448717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.449048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.449308 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.449494 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.459160 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.465808 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsbws\" (UniqueName: \"kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws\") pod \"ceilometer-0\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " pod="openstack/ceilometer-0" Nov 25 14:52:38 crc kubenswrapper[4879]: I1125 14:52:38.610269 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.175141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerStarted","Data":"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b"} Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.175271 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-log" containerID="cri-o://a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" gracePeriod=30 Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.175305 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-metadata" containerID="cri-o://2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" gracePeriod=30 Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.181074 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerStarted","Data":"56898260aedf25481718571ae793d987a939f42ccba1bcf6b9440d2851fec5eb"} Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.188653 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:52:39 crc kubenswrapper[4879]: W1125 14:52:39.195302 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0c35a74_170c_493d_a037_bc7074836e43.slice/crio-22e5ff2b183ae6908c001765c3d5a1be6fee3b18b38c883bf1d575101d5aede0 WatchSource:0}: Error finding container 22e5ff2b183ae6908c001765c3d5a1be6fee3b18b38c883bf1d575101d5aede0: Status 404 returned error can't find the container with id 22e5ff2b183ae6908c001765c3d5a1be6fee3b18b38c883bf1d575101d5aede0 Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.207304 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.791212508 podStartE2EDuration="7.207284897s" podCreationTimestamp="2025-11-25 14:52:32 +0000 UTC" firstStartedPulling="2025-11-25 14:52:33.148286729 +0000 UTC m=+1644.751699810" lastFinishedPulling="2025-11-25 14:52:37.564359128 +0000 UTC m=+1649.167772199" observedRunningTime="2025-11-25 14:52:39.205632843 +0000 UTC m=+1650.809045914" watchObservedRunningTime="2025-11-25 14:52:39.207284897 +0000 UTC m=+1650.810697968" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.238986 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.839312057 podStartE2EDuration="8.238961996s" podCreationTimestamp="2025-11-25 14:52:31 +0000 UTC" firstStartedPulling="2025-11-25 14:52:33.164691078 +0000 UTC m=+1644.768104159" lastFinishedPulling="2025-11-25 14:52:37.564341027 +0000 UTC m=+1649.167754098" observedRunningTime="2025-11-25 14:52:39.230301714 +0000 UTC m=+1650.833714805" watchObservedRunningTime="2025-11-25 14:52:39.238961996 +0000 UTC m=+1650.842375067" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.656777 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34855c13-ac2b-491b-a376-8d1c3be6344f" path="/var/lib/kubelet/pods/34855c13-ac2b-491b-a376-8d1c3be6344f/volumes" Nov 25 14:52:39 crc 
kubenswrapper[4879]: I1125 14:52:39.766873 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.868684 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data\") pod \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.868830 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkmlj\" (UniqueName: \"kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj\") pod \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.868882 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs\") pod \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.868987 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle\") pod \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\" (UID: \"b9dfe2d4-f7d5-4dc7-b734-c6136838325b\") " Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.870584 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs" (OuterVolumeSpecName: "logs") pod "b9dfe2d4-f7d5-4dc7-b734-c6136838325b" (UID: "b9dfe2d4-f7d5-4dc7-b734-c6136838325b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.880301 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj" (OuterVolumeSpecName: "kube-api-access-nkmlj") pod "b9dfe2d4-f7d5-4dc7-b734-c6136838325b" (UID: "b9dfe2d4-f7d5-4dc7-b734-c6136838325b"). InnerVolumeSpecName "kube-api-access-nkmlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.901898 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data" (OuterVolumeSpecName: "config-data") pod "b9dfe2d4-f7d5-4dc7-b734-c6136838325b" (UID: "b9dfe2d4-f7d5-4dc7-b734-c6136838325b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.903372 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9dfe2d4-f7d5-4dc7-b734-c6136838325b" (UID: "b9dfe2d4-f7d5-4dc7-b734-c6136838325b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.972171 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.972242 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkmlj\" (UniqueName: \"kubernetes.io/projected/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-kube-api-access-nkmlj\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.972257 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:39 crc kubenswrapper[4879]: I1125 14:52:39.972269 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9dfe2d4-f7d5-4dc7-b734-c6136838325b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.208809 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerStarted","Data":"22e5ff2b183ae6908c001765c3d5a1be6fee3b18b38c883bf1d575101d5aede0"} Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.211922 4879 generic.go:334] "Generic (PLEG): container finished" podID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerID="2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" exitCode=0 Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.211954 4879 generic.go:334] "Generic (PLEG): container finished" podID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerID="a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" exitCode=143 Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.212350 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.212343 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerDied","Data":"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b"} Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.212493 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerDied","Data":"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6"} Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.212509 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b9dfe2d4-f7d5-4dc7-b734-c6136838325b","Type":"ContainerDied","Data":"538d77671bdb3741165203077ab83dad1b52d01f11187430730b9785d9426b01"} Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.212528 4879 scope.go:117] "RemoveContainer" containerID="2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.245076 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.256334 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.257525 4879 scope.go:117] "RemoveContainer" containerID="a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.269533 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:40 crc kubenswrapper[4879]: E1125 14:52:40.269943 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-log" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.269962 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-log" Nov 25 14:52:40 crc kubenswrapper[4879]: E1125 14:52:40.270004 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-metadata" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.270011 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-metadata" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.270327 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-log" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.270355 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" containerName="nova-metadata-metadata" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.271670 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.275562 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.275768 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.301485 4879 scope.go:117] "RemoveContainer" containerID="2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.305393 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:40 crc kubenswrapper[4879]: E1125 14:52:40.307352 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b\": container with ID starting with 2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b not found: ID does not exist" containerID="2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.307463 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b"} err="failed to get container status \"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b\": rpc error: code = NotFound desc = could not find container \"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b\": container with ID starting with 2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b not found: ID does not exist" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.307496 4879 scope.go:117] "RemoveContainer" containerID="a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" Nov 25 14:52:40 crc kubenswrapper[4879]: E1125 14:52:40.308049 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6\": container with ID starting with a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6 not found: ID does not exist" containerID="a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.308120 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6"} err="failed to get container status \"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6\": rpc error: code = NotFound desc = could not find container \"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6\": container with ID starting with a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6 not found: ID does not exist" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.308175 4879 scope.go:117] "RemoveContainer" containerID="2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.309390 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b"} err="failed to get container status \"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b\": rpc error: 
code = NotFound desc = could not find container \"2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b\": container with ID starting with 2573da4da6dd3470b4decf20ede43eedbb01b24af2b6eee3fb33ded2d50ace6b not found: ID does not exist" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.309427 4879 scope.go:117] "RemoveContainer" containerID="a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.309803 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6"} err="failed to get container status \"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6\": rpc error: code = NotFound desc = could not find container \"a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6\": container with ID starting with a26bfa51645c48f1f0d5a4f8d79b6355998dcd9e9254ac1367a211fc0653baf6 not found: ID does not exist" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.383424 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.383507 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.383618 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlp7m\" (UniqueName: \"kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.383846 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.383934 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.404495 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485324 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485423 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wlp7m\" (UniqueName: \"kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485534 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485567 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485689 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.485820 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.493651 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.500993 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.512847 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.512936 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlp7m\" (UniqueName: \"kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m\") pod \"nova-metadata-0\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " pod="openstack/nova-metadata-0" Nov 25 14:52:40 crc kubenswrapper[4879]: I1125 14:52:40.613533 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:41 crc kubenswrapper[4879]: I1125 14:52:41.088040 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:41 crc kubenswrapper[4879]: W1125 14:52:41.090036 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd94b075f_320a_4140_9232_eb6b6db0228e.slice/crio-debfd33d47fa12c57241e253159fd381815a42e88d804d1f1356e812b062e922 WatchSource:0}: Error finding container debfd33d47fa12c57241e253159fd381815a42e88d804d1f1356e812b062e922: Status 404 returned error can't find the container with id debfd33d47fa12c57241e253159fd381815a42e88d804d1f1356e812b062e922 Nov 25 14:52:41 crc kubenswrapper[4879]: I1125 14:52:41.242505 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerStarted","Data":"debfd33d47fa12c57241e253159fd381815a42e88d804d1f1356e812b062e922"} Nov 25 14:52:41 crc kubenswrapper[4879]: I1125 14:52:41.245540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerStarted","Data":"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227"} Nov 25 14:52:41 crc kubenswrapper[4879]: I1125 14:52:41.659428 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9dfe2d4-f7d5-4dc7-b734-c6136838325b" path="/var/lib/kubelet/pods/b9dfe2d4-f7d5-4dc7-b734-c6136838325b/volumes" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.263099 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerStarted","Data":"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373"} Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.266443 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerStarted","Data":"47da48436bd6446c30fdb8cb9e50ce1df6a2efb7ac0b149c20f95c06171f1513"} Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.266776 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerStarted","Data":"8e873910359e08effd1cdd6f5e4f7ed9dde06a82dfb691ba4ccdd34b3b0cf683"} Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.307530 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.30750262 podStartE2EDuration="2.30750262s" podCreationTimestamp="2025-11-25 14:52:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:42.292485928 +0000 UTC m=+1653.895898999" watchObservedRunningTime="2025-11-25 14:52:42.30750262 +0000 UTC m=+1653.910915691" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.349304 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.349473 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.536081 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" 
Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.536490 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.572821 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.682418 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.771864 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.772588 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="dnsmasq-dns" containerID="cri-o://156acc7c316592f8270ffab312637d1aa68721d3bb28b77e78eefde693e3b452" gracePeriod=10 Nov 25 14:52:42 crc kubenswrapper[4879]: I1125 14:52:42.990418 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.284073 4879 generic.go:334] "Generic (PLEG): container finished" podID="64f062b9-8008-483b-b61c-07c621b06e67" containerID="3026ef6362da67dd8812a17397d7367d13aad659ca6534125a24e47f36e7fe36" exitCode=0 Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.284603 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w29sx" event={"ID":"64f062b9-8008-483b-b61c-07c621b06e67","Type":"ContainerDied","Data":"3026ef6362da67dd8812a17397d7367d13aad659ca6534125a24e47f36e7fe36"} Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.289170 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerStarted","Data":"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1"} Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.291308 4879 generic.go:334] "Generic (PLEG): container finished" podID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerID="156acc7c316592f8270ffab312637d1aa68721d3bb28b77e78eefde693e3b452" exitCode=0 Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.292419 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" event={"ID":"b8ec0861-1e73-4738-8c15-157fbc5418d4","Type":"ContainerDied","Data":"156acc7c316592f8270ffab312637d1aa68721d3bb28b77e78eefde693e3b452"} Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.348644 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.439933 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.440043 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 
14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.570585 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.688826 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.688912 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.688984 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.689071 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b698w\" (UniqueName: \"kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.689997 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.692978 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0\") pod \"b8ec0861-1e73-4738-8c15-157fbc5418d4\" (UID: \"b8ec0861-1e73-4738-8c15-157fbc5418d4\") " Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.710960 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w" (OuterVolumeSpecName: "kube-api-access-b698w") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "kube-api-access-b698w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.778993 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.795690 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b698w\" (UniqueName: \"kubernetes.io/projected/b8ec0861-1e73-4738-8c15-157fbc5418d4-kube-api-access-b698w\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.795737 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.863833 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config" (OuterVolumeSpecName: "config") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.873357 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.888147 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.891103 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b8ec0861-1e73-4738-8c15-157fbc5418d4" (UID: "b8ec0861-1e73-4738-8c15-157fbc5418d4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.896864 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.896902 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.896913 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:43 crc kubenswrapper[4879]: I1125 14:52:43.896921 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8ec0861-1e73-4738-8c15-157fbc5418d4-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.303377 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" event={"ID":"b8ec0861-1e73-4738-8c15-157fbc5418d4","Type":"ContainerDied","Data":"d73a3c6430764643a16f850677a4e3fd28e79e79c16a826b2156b960b10daf19"} Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.303492 4879 scope.go:117] "RemoveContainer" containerID="156acc7c316592f8270ffab312637d1aa68721d3bb28b77e78eefde693e3b452" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.303518 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-psxqm" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.362600 4879 scope.go:117] "RemoveContainer" containerID="7f4a59c2fffdc1d804886afcede4c4e8541a71535286b6d1bba3ae15c99c20ba" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.372286 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.389438 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-psxqm"] Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.844446 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.917872 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wqdp\" (UniqueName: \"kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp\") pod \"64f062b9-8008-483b-b61c-07c621b06e67\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.917920 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle\") pod \"64f062b9-8008-483b-b61c-07c621b06e67\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.917977 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts\") pod \"64f062b9-8008-483b-b61c-07c621b06e67\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.918033 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data\") pod \"64f062b9-8008-483b-b61c-07c621b06e67\" (UID: \"64f062b9-8008-483b-b61c-07c621b06e67\") " Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.924294 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts" (OuterVolumeSpecName: "scripts") pod "64f062b9-8008-483b-b61c-07c621b06e67" (UID: "64f062b9-8008-483b-b61c-07c621b06e67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.949678 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp" (OuterVolumeSpecName: "kube-api-access-5wqdp") pod "64f062b9-8008-483b-b61c-07c621b06e67" (UID: "64f062b9-8008-483b-b61c-07c621b06e67"). InnerVolumeSpecName "kube-api-access-5wqdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.972716 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64f062b9-8008-483b-b61c-07c621b06e67" (UID: "64f062b9-8008-483b-b61c-07c621b06e67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:44 crc kubenswrapper[4879]: I1125 14:52:44.987675 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data" (OuterVolumeSpecName: "config-data") pod "64f062b9-8008-483b-b61c-07c621b06e67" (UID: "64f062b9-8008-483b-b61c-07c621b06e67"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.022014 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wqdp\" (UniqueName: \"kubernetes.io/projected/64f062b9-8008-483b-b61c-07c621b06e67-kube-api-access-5wqdp\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.022062 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.022077 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.022090 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64f062b9-8008-483b-b61c-07c621b06e67-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.323078 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerStarted","Data":"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4"} Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.323541 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.333411 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w29sx" event={"ID":"64f062b9-8008-483b-b61c-07c621b06e67","Type":"ContainerDied","Data":"59ac9fe6a31c8e270357539b4c8f95a49b97f91f1643d79fc61110a758f4cdba"} Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.333488 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59ac9fe6a31c8e270357539b4c8f95a49b97f91f1643d79fc61110a758f4cdba" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.333504 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w29sx" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.354051 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.935355429 podStartE2EDuration="7.354033565s" podCreationTimestamp="2025-11-25 14:52:38 +0000 UTC" firstStartedPulling="2025-11-25 14:52:39.198159813 +0000 UTC m=+1650.801572884" lastFinishedPulling="2025-11-25 14:52:44.616837949 +0000 UTC m=+1656.220251020" observedRunningTime="2025-11-25 14:52:45.353170212 +0000 UTC m=+1656.956583293" watchObservedRunningTime="2025-11-25 14:52:45.354033565 +0000 UTC m=+1656.957446636" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.481426 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.481838 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-log" containerID="cri-o://0c341d493c2ea79312086f53b3151a095683fbeccc1f0a0e85bd205019c59b7e" gracePeriod=30 Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.481926 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-api" containerID="cri-o://56898260aedf25481718571ae793d987a939f42ccba1bcf6b9440d2851fec5eb" gracePeriod=30 Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.495409 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.495603 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" containerName="nova-scheduler-scheduler" containerID="cri-o://7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" gracePeriod=30 Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.514886 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.515146 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-log" containerID="cri-o://8e873910359e08effd1cdd6f5e4f7ed9dde06a82dfb691ba4ccdd34b3b0cf683" gracePeriod=30 Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.515246 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-metadata" containerID="cri-o://47da48436bd6446c30fdb8cb9e50ce1df6a2efb7ac0b149c20f95c06171f1513" gracePeriod=30 Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.614753 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.615026 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:52:45 crc kubenswrapper[4879]: I1125 14:52:45.663814 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" path="/var/lib/kubelet/pods/b8ec0861-1e73-4738-8c15-157fbc5418d4/volumes" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.351546 4879 generic.go:334] "Generic 
(PLEG): container finished" podID="d94b075f-320a-4140-9232-eb6b6db0228e" containerID="47da48436bd6446c30fdb8cb9e50ce1df6a2efb7ac0b149c20f95c06171f1513" exitCode=0 Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.351897 4879 generic.go:334] "Generic (PLEG): container finished" podID="d94b075f-320a-4140-9232-eb6b6db0228e" containerID="8e873910359e08effd1cdd6f5e4f7ed9dde06a82dfb691ba4ccdd34b3b0cf683" exitCode=143 Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.351651 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerDied","Data":"47da48436bd6446c30fdb8cb9e50ce1df6a2efb7ac0b149c20f95c06171f1513"} Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.351952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerDied","Data":"8e873910359e08effd1cdd6f5e4f7ed9dde06a82dfb691ba4ccdd34b3b0cf683"} Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.356888 4879 generic.go:334] "Generic (PLEG): container finished" podID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerID="0c341d493c2ea79312086f53b3151a095683fbeccc1f0a0e85bd205019c59b7e" exitCode=143 Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.356992 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerDied","Data":"0c341d493c2ea79312086f53b3151a095683fbeccc1f0a0e85bd205019c59b7e"} Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.680552 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.762684 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs\") pod \"d94b075f-320a-4140-9232-eb6b6db0228e\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.763036 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data\") pod \"d94b075f-320a-4140-9232-eb6b6db0228e\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.763108 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle\") pod \"d94b075f-320a-4140-9232-eb6b6db0228e\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.763198 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs" (OuterVolumeSpecName: "logs") pod "d94b075f-320a-4140-9232-eb6b6db0228e" (UID: "d94b075f-320a-4140-9232-eb6b6db0228e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.763246 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlp7m\" (UniqueName: \"kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m\") pod \"d94b075f-320a-4140-9232-eb6b6db0228e\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.763332 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs\") pod \"d94b075f-320a-4140-9232-eb6b6db0228e\" (UID: \"d94b075f-320a-4140-9232-eb6b6db0228e\") " Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.764351 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d94b075f-320a-4140-9232-eb6b6db0228e-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.789492 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m" (OuterVolumeSpecName: "kube-api-access-wlp7m") pod "d94b075f-320a-4140-9232-eb6b6db0228e" (UID: "d94b075f-320a-4140-9232-eb6b6db0228e"). InnerVolumeSpecName "kube-api-access-wlp7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.802638 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data" (OuterVolumeSpecName: "config-data") pod "d94b075f-320a-4140-9232-eb6b6db0228e" (UID: "d94b075f-320a-4140-9232-eb6b6db0228e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.824913 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d94b075f-320a-4140-9232-eb6b6db0228e" (UID: "d94b075f-320a-4140-9232-eb6b6db0228e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.842053 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d94b075f-320a-4140-9232-eb6b6db0228e" (UID: "d94b075f-320a-4140-9232-eb6b6db0228e"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.866019 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlp7m\" (UniqueName: \"kubernetes.io/projected/d94b075f-320a-4140-9232-eb6b6db0228e-kube-api-access-wlp7m\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.866062 4879 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.866074 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:46 crc kubenswrapper[4879]: I1125 14:52:46.866086 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d94b075f-320a-4140-9232-eb6b6db0228e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.373167 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.373156 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d94b075f-320a-4140-9232-eb6b6db0228e","Type":"ContainerDied","Data":"debfd33d47fa12c57241e253159fd381815a42e88d804d1f1356e812b062e922"} Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.373449 4879 scope.go:117] "RemoveContainer" containerID="47da48436bd6446c30fdb8cb9e50ce1df6a2efb7ac0b149c20f95c06171f1513" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.376386 4879 generic.go:334] "Generic (PLEG): container finished" podID="20c70c15-43aa-44ee-8a6c-ae31b460ac5e" containerID="34f47d04d59bb556c83ac90c7fd44769f03020a5c2534c1b206563b49dfba9f1" exitCode=0 Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.376453 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qncf" event={"ID":"20c70c15-43aa-44ee-8a6c-ae31b460ac5e","Type":"ContainerDied","Data":"34f47d04d59bb556c83ac90c7fd44769f03020a5c2534c1b206563b49dfba9f1"} Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.409078 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.409176 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.409273 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.409552 4879 scope.go:117] "RemoveContainer" containerID="8e873910359e08effd1cdd6f5e4f7ed9dde06a82dfb691ba4ccdd34b3b0cf683" Nov 25 14:52:47 crc kubenswrapper[4879]: 
I1125 14:52:47.410663 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.410754 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" gracePeriod=600 Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.448752 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.470413 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.481505 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.482393 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-metadata" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482423 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-metadata" Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.482444 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="dnsmasq-dns" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482453 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="dnsmasq-dns" Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.482464 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64f062b9-8008-483b-b61c-07c621b06e67" containerName="nova-manage" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482470 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f062b9-8008-483b-b61c-07c621b06e67" containerName="nova-manage" Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.482485 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="init" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482494 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="init" Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.482523 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-log" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482529 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-log" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482795 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8ec0861-1e73-4738-8c15-157fbc5418d4" containerName="dnsmasq-dns" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482822 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-log" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482848 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" containerName="nova-metadata-metadata" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.482866 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="64f062b9-8008-483b-b61c-07c621b06e67" containerName="nova-manage" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.484623 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.487678 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.488433 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.492834 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.538749 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.540505 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.543028 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.543109 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" containerName="nova-scheduler-scheduler" Nov 25 14:52:47 crc kubenswrapper[4879]: E1125 14:52:47.544175 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.580401 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkb2w\" (UniqueName: \"kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w\") pod \"nova-metadata-0\" (UID: 
\"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.580489 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.580526 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.580576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.580763 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.660361 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d94b075f-320a-4140-9232-eb6b6db0228e" path="/var/lib/kubelet/pods/d94b075f-320a-4140-9232-eb6b6db0228e/volumes" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.682568 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.682682 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.682730 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.682918 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkb2w\" (UniqueName: \"kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.682994 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.683537 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.687688 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.689737 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.692234 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.703627 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkb2w\" (UniqueName: \"kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w\") pod \"nova-metadata-0\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " pod="openstack/nova-metadata-0" Nov 25 14:52:47 crc kubenswrapper[4879]: I1125 14:52:47.808642 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.299856 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.412754 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" exitCode=0 Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.413390 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5"} Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.413509 4879 scope.go:117] "RemoveContainer" containerID="c5933a5c56b3055e4c7db8e0eb320d1e1133844e1dbe1178c77e8edc7456d7cd" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.415430 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:52:48 crc kubenswrapper[4879]: E1125 14:52:48.416051 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.417684 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerStarted","Data":"906e20b47cb04ba1cc98aa0bd5811af2aac253119c8048343095c24bbcdf522c"} Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.834507 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.918400 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle\") pod \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.918493 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data\") pod \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.918521 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhn2v\" (UniqueName: \"kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v\") pod \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.918542 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts\") pod \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\" (UID: \"20c70c15-43aa-44ee-8a6c-ae31b460ac5e\") " Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.927401 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v" (OuterVolumeSpecName: "kube-api-access-qhn2v") pod "20c70c15-43aa-44ee-8a6c-ae31b460ac5e" (UID: "20c70c15-43aa-44ee-8a6c-ae31b460ac5e"). InnerVolumeSpecName "kube-api-access-qhn2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.927399 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts" (OuterVolumeSpecName: "scripts") pod "20c70c15-43aa-44ee-8a6c-ae31b460ac5e" (UID: "20c70c15-43aa-44ee-8a6c-ae31b460ac5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.952729 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20c70c15-43aa-44ee-8a6c-ae31b460ac5e" (UID: "20c70c15-43aa-44ee-8a6c-ae31b460ac5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:48 crc kubenswrapper[4879]: I1125 14:52:48.959552 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data" (OuterVolumeSpecName: "config-data") pod "20c70c15-43aa-44ee-8a6c-ae31b460ac5e" (UID: "20c70c15-43aa-44ee-8a6c-ae31b460ac5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.020683 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.020741 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.020755 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhn2v\" (UniqueName: \"kubernetes.io/projected/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-kube-api-access-qhn2v\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.020768 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20c70c15-43aa-44ee-8a6c-ae31b460ac5e-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.433585 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6qncf" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.433553 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6qncf" event={"ID":"20c70c15-43aa-44ee-8a6c-ae31b460ac5e","Type":"ContainerDied","Data":"630a493a4855123d3e85d3f2ca57d8457f5b31bb588e230df2e474a57b69e0f1"} Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.434416 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="630a493a4855123d3e85d3f2ca57d8457f5b31bb588e230df2e474a57b69e0f1" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.441613 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerStarted","Data":"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301"} Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.441663 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerStarted","Data":"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b"} Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.471094 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.4710706 podStartE2EDuration="2.4710706s" podCreationTimestamp="2025-11-25 14:52:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:49.47074587 +0000 UTC m=+1661.074158951" watchObservedRunningTime="2025-11-25 14:52:49.4710706 +0000 UTC m=+1661.074483681" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.503159 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:52:49 crc kubenswrapper[4879]: E1125 14:52:49.506388 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20c70c15-43aa-44ee-8a6c-ae31b460ac5e" containerName="nova-cell1-conductor-db-sync" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.506430 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="20c70c15-43aa-44ee-8a6c-ae31b460ac5e" 
containerName="nova-cell1-conductor-db-sync" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.506818 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="20c70c15-43aa-44ee-8a6c-ae31b460ac5e" containerName="nova-cell1-conductor-db-sync" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.507823 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.510242 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.524270 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.529017 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.529060 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.529800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffzpw\" (UniqueName: \"kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.632170 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffzpw\" (UniqueName: \"kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.632374 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.632412 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.637558 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.660950 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.671958 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffzpw\" (UniqueName: \"kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw\") pod \"nova-cell1-conductor-0\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:49 crc kubenswrapper[4879]: I1125 14:52:49.828106 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.354280 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.486110 4879 generic.go:334] "Generic (PLEG): container finished" podID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerID="56898260aedf25481718571ae793d987a939f42ccba1bcf6b9440d2851fec5eb" exitCode=0 Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.486436 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerDied","Data":"56898260aedf25481718571ae793d987a939f42ccba1bcf6b9440d2851fec5eb"} Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.494479 4879 generic.go:334] "Generic (PLEG): container finished" podID="b4f5462a-a253-4d98-a059-b429cc926dff" containerID="7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" exitCode=0 Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.494547 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b4f5462a-a253-4d98-a059-b429cc926dff","Type":"ContainerDied","Data":"7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9"} Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:50.506592 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ee22f7f3-e4e8-4166-87bd-ca7833654410","Type":"ContainerStarted","Data":"9ea62df33810e22ab4c765b9f094855bb5651dd4acd4a54732575dd34089cdd9"} Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:51.519535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ee22f7f3-e4e8-4166-87bd-ca7833654410","Type":"ContainerStarted","Data":"de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966"} Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:51.521288 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 25 14:52:51 crc kubenswrapper[4879]: I1125 14:52:51.545560 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.545536314 podStartE2EDuration="2.545536314s" podCreationTimestamp="2025-11-25 14:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:51.539269636 +0000 UTC m=+1663.142682717" watchObservedRunningTime="2025-11-25 14:52:51.545536314 +0000 UTC m=+1663.148949385" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.232784 4879 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.241317 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394454 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle\") pod \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394578 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data\") pod \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394643 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data\") pod \"b4f5462a-a253-4d98-a059-b429cc926dff\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394692 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6lkf\" (UniqueName: \"kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf\") pod \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394747 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs\") pod \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\" (UID: \"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.394848 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle\") pod \"b4f5462a-a253-4d98-a059-b429cc926dff\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.395533 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs" (OuterVolumeSpecName: "logs") pod "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" (UID: "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.395683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4tx7\" (UniqueName: \"kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7\") pod \"b4f5462a-a253-4d98-a059-b429cc926dff\" (UID: \"b4f5462a-a253-4d98-a059-b429cc926dff\") " Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.397051 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.405459 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf" (OuterVolumeSpecName: "kube-api-access-l6lkf") pod "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" (UID: "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b"). InnerVolumeSpecName "kube-api-access-l6lkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.405828 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7" (OuterVolumeSpecName: "kube-api-access-h4tx7") pod "b4f5462a-a253-4d98-a059-b429cc926dff" (UID: "b4f5462a-a253-4d98-a059-b429cc926dff"). InnerVolumeSpecName "kube-api-access-h4tx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.430216 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4f5462a-a253-4d98-a059-b429cc926dff" (UID: "b4f5462a-a253-4d98-a059-b429cc926dff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.430891 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data" (OuterVolumeSpecName: "config-data") pod "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" (UID: "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.432820 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" (UID: "1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.437833 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data" (OuterVolumeSpecName: "config-data") pod "b4f5462a-a253-4d98-a059-b429cc926dff" (UID: "b4f5462a-a253-4d98-a059-b429cc926dff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498742 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4tx7\" (UniqueName: \"kubernetes.io/projected/b4f5462a-a253-4d98-a059-b429cc926dff-kube-api-access-h4tx7\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498793 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498809 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498823 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498835 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6lkf\" (UniqueName: \"kubernetes.io/projected/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b-kube-api-access-l6lkf\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.498845 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4f5462a-a253-4d98-a059-b429cc926dff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.535147 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b","Type":"ContainerDied","Data":"f86e7122d166362f7caeb96ea094d234b1f371afa2831ba40a46a570d90b355c"} Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.535200 4879 scope.go:117] "RemoveContainer" containerID="56898260aedf25481718571ae793d987a939f42ccba1bcf6b9440d2851fec5eb" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.535319 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.549200 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.549462 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b4f5462a-a253-4d98-a059-b429cc926dff","Type":"ContainerDied","Data":"3bd65040c84eff20d2fd7822238c4f11a61c7f617572a80c5316026379c706e9"} Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.597284 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.607219 4879 scope.go:117] "RemoveContainer" containerID="0c341d493c2ea79312086f53b3151a095683fbeccc1f0a0e85bd205019c59b7e" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.647806 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.650762 4879 scope.go:117] "RemoveContainer" containerID="7c05331ab865a644d1796e5a3922a64b3099ab39563f347c184bf50023e34bb9" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.659808 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.670916 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.683175 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: E1125 14:52:52.683985 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-log" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684019 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-log" Nov 25 14:52:52 crc kubenswrapper[4879]: E1125 14:52:52.684089 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" containerName="nova-scheduler-scheduler" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684100 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" containerName="nova-scheduler-scheduler" Nov 25 14:52:52 crc kubenswrapper[4879]: E1125 14:52:52.684117 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-api" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684139 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-api" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684390 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" containerName="nova-scheduler-scheduler" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684433 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-log" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.684446 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" containerName="nova-api-api" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.686005 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.689521 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.700754 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.714113 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.716554 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.719859 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.728890 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.809890 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.809984 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.812266 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.812398 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.812491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.812527 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmb5k\" (UniqueName: \"kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914030 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914185 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " 
pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914234 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914337 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914382 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtr78\" (UniqueName: \"kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914421 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmb5k\" (UniqueName: \"kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.914475 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.915185 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.918762 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.919263 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:52 crc kubenswrapper[4879]: I1125 14:52:52.932586 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmb5k\" (UniqueName: \"kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k\") pod \"nova-api-0\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " pod="openstack/nova-api-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.006477 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.016221 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtr78\" (UniqueName: \"kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.016341 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.016418 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.020396 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.020643 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.042768 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtr78\" (UniqueName: \"kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78\") pod \"nova-scheduler-0\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.339654 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.502813 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.563514 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerStarted","Data":"b740c66b4bb25869ddac27cdb8b6cedb5922d4671792b285972a54c249f36350"} Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.687519 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b" path="/var/lib/kubelet/pods/1e765f4b-ebe3-47c0-9a96-a0a5b8b7a36b/volumes" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.689378 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4f5462a-a253-4d98-a059-b429cc926dff" path="/var/lib/kubelet/pods/b4f5462a-a253-4d98-a059-b429cc926dff/volumes" Nov 25 14:52:53 crc kubenswrapper[4879]: I1125 14:52:53.830332 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:52:53 crc kubenswrapper[4879]: W1125 14:52:53.833906 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9b40c6c_3ff1_4986_9d35_63603cf1af8e.slice/crio-e6f1fae5eb699fac185426515382b93bca6a27041889b295be0e3520d896814b WatchSource:0}: Error finding container e6f1fae5eb699fac185426515382b93bca6a27041889b295be0e3520d896814b: Status 404 returned error can't find the container with id e6f1fae5eb699fac185426515382b93bca6a27041889b295be0e3520d896814b Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.577086 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9b40c6c-3ff1-4986-9d35-63603cf1af8e","Type":"ContainerStarted","Data":"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433"} Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.577496 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9b40c6c-3ff1-4986-9d35-63603cf1af8e","Type":"ContainerStarted","Data":"e6f1fae5eb699fac185426515382b93bca6a27041889b295be0e3520d896814b"} Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.579895 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerStarted","Data":"0b9111ec02906e676e584643f219642556ee035362674b3003db27461cf5764b"} Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.579928 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerStarted","Data":"b9cf5bb919cbcc48a7c785effcc107816382503b945bcf102d9aa8e026d06aca"} Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.602462 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.602437498 podStartE2EDuration="2.602437498s" podCreationTimestamp="2025-11-25 14:52:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:54.595904382 +0000 UTC m=+1666.199317463" watchObservedRunningTime="2025-11-25 14:52:54.602437498 +0000 UTC m=+1666.205850569" Nov 25 14:52:54 crc kubenswrapper[4879]: I1125 14:52:54.615389 4879 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.615368534 podStartE2EDuration="2.615368534s" podCreationTimestamp="2025-11-25 14:52:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:52:54.613228446 +0000 UTC m=+1666.216641537" watchObservedRunningTime="2025-11-25 14:52:54.615368534 +0000 UTC m=+1666.218781605" Nov 25 14:52:57 crc kubenswrapper[4879]: I1125 14:52:57.811360 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 14:52:57 crc kubenswrapper[4879]: I1125 14:52:57.811687 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 14:52:58 crc kubenswrapper[4879]: I1125 14:52:58.340046 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 14:52:58 crc kubenswrapper[4879]: I1125 14:52:58.823319 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 25 14:52:58 crc kubenswrapper[4879]: I1125 14:52:58.824299 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:52:59 crc kubenswrapper[4879]: I1125 14:52:59.858660 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.007622 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.008001 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.340385 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.366449 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.645673 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:53:03 crc kubenswrapper[4879]: E1125 14:53:03.645979 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:53:03 crc kubenswrapper[4879]: I1125 14:53:03.711860 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 14:53:04 crc kubenswrapper[4879]: I1125 14:53:04.049387 4879 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/nova-api-0" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:04 crc kubenswrapper[4879]: I1125 14:53:04.049413 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:07 crc kubenswrapper[4879]: I1125 14:53:07.813874 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 14:53:07 crc kubenswrapper[4879]: I1125 14:53:07.818042 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 14:53:07 crc kubenswrapper[4879]: I1125 14:53:07.826038 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.580392 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.648255 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.730133 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data\") pod \"e9df1834-6236-443d-8662-4da48203efac\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.730579 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle\") pod \"e9df1834-6236-443d-8662-4da48203efac\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.730639 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nf6c\" (UniqueName: \"kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c\") pod \"e9df1834-6236-443d-8662-4da48203efac\" (UID: \"e9df1834-6236-443d-8662-4da48203efac\") " Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.758422 4879 generic.go:334] "Generic (PLEG): container finished" podID="e9df1834-6236-443d-8662-4da48203efac" containerID="3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb" exitCode=137 Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.760269 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.760299 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9df1834-6236-443d-8662-4da48203efac","Type":"ContainerDied","Data":"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb"} Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.760360 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e9df1834-6236-443d-8662-4da48203efac","Type":"ContainerDied","Data":"4d8ad6e1dbe9f0dac069f7feb9fdd67a3936b18ff80981636645cc8fbab1b9bf"} Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.760380 4879 scope.go:117] "RemoveContainer" containerID="3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.767284 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c" (OuterVolumeSpecName: "kube-api-access-6nf6c") pod "e9df1834-6236-443d-8662-4da48203efac" (UID: "e9df1834-6236-443d-8662-4da48203efac"). InnerVolumeSpecName "kube-api-access-6nf6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.789284 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9df1834-6236-443d-8662-4da48203efac" (UID: "e9df1834-6236-443d-8662-4da48203efac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.789920 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.799967 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data" (OuterVolumeSpecName: "config-data") pod "e9df1834-6236-443d-8662-4da48203efac" (UID: "e9df1834-6236-443d-8662-4da48203efac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.834606 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.834641 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9df1834-6236-443d-8662-4da48203efac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.834655 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nf6c\" (UniqueName: \"kubernetes.io/projected/e9df1834-6236-443d-8662-4da48203efac-kube-api-access-6nf6c\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.894649 4879 scope.go:117] "RemoveContainer" containerID="3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb" Nov 25 14:53:08 crc kubenswrapper[4879]: E1125 14:53:08.895376 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb\": container with ID starting with 3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb not found: ID does not exist" containerID="3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb" Nov 25 14:53:08 crc kubenswrapper[4879]: I1125 14:53:08.895448 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb"} err="failed to get container status \"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb\": rpc error: code = NotFound desc = could not find container \"3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb\": container with ID starting with 3557b636f3623987bc4468411a15dd6361c3244b8525b8a7242d5909b5e991bb not found: ID does not exist" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.103118 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.114901 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.137794 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:53:09 crc kubenswrapper[4879]: E1125 14:53:09.138337 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9df1834-6236-443d-8662-4da48203efac" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.138362 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9df1834-6236-443d-8662-4da48203efac" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.138574 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9df1834-6236-443d-8662-4da48203efac" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.139240 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.142484 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.143214 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.143454 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.156656 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.240819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.240876 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55hnv\" (UniqueName: \"kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.240908 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.240957 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.241152 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.343119 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.343188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55hnv\" (UniqueName: \"kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " 
pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.343220 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.343264 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.343296 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.348807 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.348896 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.348911 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.349321 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.363664 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55hnv\" (UniqueName: \"kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv\") pod \"nova-cell1-novncproxy-0\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.456988 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.665692 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9df1834-6236-443d-8662-4da48203efac" path="/var/lib/kubelet/pods/e9df1834-6236-443d-8662-4da48203efac/volumes" Nov 25 14:53:09 crc kubenswrapper[4879]: W1125 14:53:09.902071 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda901e0fb_8403_4e8d_a1b1_b3ccae942552.slice/crio-6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48 WatchSource:0}: Error finding container 6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48: Status 404 returned error can't find the container with id 6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48 Nov 25 14:53:09 crc kubenswrapper[4879]: I1125 14:53:09.903468 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:53:10 crc kubenswrapper[4879]: I1125 14:53:10.782522 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a901e0fb-8403-4e8d-a1b1-b3ccae942552","Type":"ContainerStarted","Data":"af2731b4880c2c108e4f64495391dcd175a79df8df0d6cf81c3c2064f645d782"} Nov 25 14:53:10 crc kubenswrapper[4879]: I1125 14:53:10.782933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a901e0fb-8403-4e8d-a1b1-b3ccae942552","Type":"ContainerStarted","Data":"6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48"} Nov 25 14:53:10 crc kubenswrapper[4879]: I1125 14:53:10.803626 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.803604999 podStartE2EDuration="1.803604999s" podCreationTimestamp="2025-11-25 14:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:10.797439573 +0000 UTC m=+1682.400852644" watchObservedRunningTime="2025-11-25 14:53:10.803604999 +0000 UTC m=+1682.407018070" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.010952 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.011368 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.011483 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.014176 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.808372 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 14:53:13 crc kubenswrapper[4879]: I1125 14:53:13.818352 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.002802 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.004912 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.025324 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128281 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128388 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128430 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128476 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128508 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.128603 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-442dw\" (UniqueName: \"kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.230827 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-442dw\" (UniqueName: \"kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.230902 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.230974 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.231013 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.231067 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.231099 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.232165 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.233203 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.233791 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.234387 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.235171 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.263108 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-442dw\" (UniqueName: 
\"kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw\") pod \"dnsmasq-dns-cd5cbd7b9-zs5xv\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.341904 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.458176 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.644571 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:53:14 crc kubenswrapper[4879]: E1125 14:53:14.644879 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:53:14 crc kubenswrapper[4879]: I1125 14:53:14.858549 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:53:15 crc kubenswrapper[4879]: I1125 14:53:15.835420 4879 generic.go:334] "Generic (PLEG): container finished" podID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerID="44362b248851344f78dab41e04ffc895817c8ec86f541ed03552d529c19b1fa6" exitCode=0 Nov 25 14:53:15 crc kubenswrapper[4879]: I1125 14:53:15.835895 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" event={"ID":"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc","Type":"ContainerDied","Data":"44362b248851344f78dab41e04ffc895817c8ec86f541ed03552d529c19b1fa6"} Nov 25 14:53:15 crc kubenswrapper[4879]: I1125 14:53:15.836016 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" event={"ID":"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc","Type":"ContainerStarted","Data":"1f3f161b98d3430ddaf732a09247da2c3322b54ab8ef7ffbfd401643b99f026e"} Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.080432 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.080721 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-central-agent" containerID="cri-o://f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227" gracePeriod=30 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.081184 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-notification-agent" containerID="cri-o://967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373" gracePeriod=30 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.081313 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="sg-core" containerID="cri-o://53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1" gracePeriod=30 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 
14:53:16.081415 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="proxy-httpd" containerID="cri-o://4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4" gracePeriod=30 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.846655 4879 generic.go:334] "Generic (PLEG): container finished" podID="d0c35a74-170c-493d-a037-bc7074836e43" containerID="4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4" exitCode=0 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.847046 4879 generic.go:334] "Generic (PLEG): container finished" podID="d0c35a74-170c-493d-a037-bc7074836e43" containerID="53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1" exitCode=2 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.847058 4879 generic.go:334] "Generic (PLEG): container finished" podID="d0c35a74-170c-493d-a037-bc7074836e43" containerID="f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227" exitCode=0 Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.846996 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerDied","Data":"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4"} Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.847162 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerDied","Data":"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1"} Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.847175 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerDied","Data":"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227"} Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.849992 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" event={"ID":"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc","Type":"ContainerStarted","Data":"64210090c0d1d50d340e81ed21d95f311647c849f5851a5b2d1cf4494b8483ae"} Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.850161 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:16 crc kubenswrapper[4879]: I1125 14:53:16.874251 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" podStartSLOduration=3.874230258 podStartE2EDuration="3.874230258s" podCreationTimestamp="2025-11-25 14:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:16.867460467 +0000 UTC m=+1688.470873538" watchObservedRunningTime="2025-11-25 14:53:16.874230258 +0000 UTC m=+1688.477643329" Nov 25 14:53:17 crc kubenswrapper[4879]: I1125 14:53:17.051636 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:17 crc kubenswrapper[4879]: I1125 14:53:17.051864 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-log" containerID="cri-o://b9cf5bb919cbcc48a7c785effcc107816382503b945bcf102d9aa8e026d06aca" gracePeriod=30 Nov 25 14:53:17 crc kubenswrapper[4879]: I1125 
14:53:17.052056 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-api" containerID="cri-o://0b9111ec02906e676e584643f219642556ee035362674b3003db27461cf5764b" gracePeriod=30 Nov 25 14:53:17 crc kubenswrapper[4879]: I1125 14:53:17.859917 4879 generic.go:334] "Generic (PLEG): container finished" podID="908eba86-8426-4692-beb1-c6a2324466f2" containerID="b9cf5bb919cbcc48a7c785effcc107816382503b945bcf102d9aa8e026d06aca" exitCode=143 Nov 25 14:53:17 crc kubenswrapper[4879]: I1125 14:53:17.860144 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerDied","Data":"b9cf5bb919cbcc48a7c785effcc107816382503b945bcf102d9aa8e026d06aca"} Nov 25 14:53:19 crc kubenswrapper[4879]: I1125 14:53:19.457574 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:19 crc kubenswrapper[4879]: I1125 14:53:19.477821 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:19 crc kubenswrapper[4879]: I1125 14:53:19.905763 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.057526 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-2xzrl"] Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.059098 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.060823 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.067501 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.068085 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2xzrl"] Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.170433 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2lmm\" (UniqueName: \"kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.171047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.171207 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.171398 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.272775 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.273091 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2lmm\" (UniqueName: \"kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.273350 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.273604 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.280271 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.280638 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.281762 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.294817 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2lmm\" (UniqueName: \"kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm\") pod \"nova-cell1-cell-mapping-2xzrl\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.568570 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.669561 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.784845 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.784907 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsbws\" (UniqueName: \"kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.784969 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.784995 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.785113 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.785233 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.785281 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.785352 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd\") pod \"d0c35a74-170c-493d-a037-bc7074836e43\" (UID: \"d0c35a74-170c-493d-a037-bc7074836e43\") " Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.791584 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.791757 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.830382 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts" (OuterVolumeSpecName: "scripts") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.837631 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws" (OuterVolumeSpecName: "kube-api-access-jsbws") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "kube-api-access-jsbws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.884719 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.889111 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsbws\" (UniqueName: \"kubernetes.io/projected/d0c35a74-170c-493d-a037-bc7074836e43-kube-api-access-jsbws\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.889165 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.889175 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.889233 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.889244 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d0c35a74-170c-493d-a037-bc7074836e43-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.907980 4879 generic.go:334] "Generic (PLEG): container finished" podID="d0c35a74-170c-493d-a037-bc7074836e43" containerID="967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373" exitCode=0 Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.908055 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerDied","Data":"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373"} Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.908088 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d0c35a74-170c-493d-a037-bc7074836e43","Type":"ContainerDied","Data":"22e5ff2b183ae6908c001765c3d5a1be6fee3b18b38c883bf1d575101d5aede0"} Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.908107 4879 scope.go:117] "RemoveContainer" containerID="4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.908258 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.916538 4879 generic.go:334] "Generic (PLEG): container finished" podID="908eba86-8426-4692-beb1-c6a2324466f2" containerID="0b9111ec02906e676e584643f219642556ee035362674b3003db27461cf5764b" exitCode=0 Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.917418 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerDied","Data":"0b9111ec02906e676e584643f219642556ee035362674b3003db27461cf5764b"} Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.920678 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.952224 4879 scope.go:117] "RemoveContainer" containerID="53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.964509 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.988678 4879 scope.go:117] "RemoveContainer" containerID="967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.991399 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:20 crc kubenswrapper[4879]: I1125 14:53:20.991432 4879 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.010333 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data" (OuterVolumeSpecName: "config-data") pod "d0c35a74-170c-493d-a037-bc7074836e43" (UID: "d0c35a74-170c-493d-a037-bc7074836e43"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.016172 4879 scope.go:117] "RemoveContainer" containerID="f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.039041 4879 scope.go:117] "RemoveContainer" containerID="4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.039420 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4\": container with ID starting with 4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4 not found: ID does not exist" containerID="4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.039451 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4"} err="failed to get container status \"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4\": rpc error: code = NotFound desc = could not find container \"4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4\": container with ID starting with 4a7aee2bbc7652189152b52a996cb6d675b8176e7d3b1de721d108ea8399cbf4 not found: ID does not exist" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.039473 4879 scope.go:117] "RemoveContainer" containerID="53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.039894 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1\": container with ID starting with 53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1 not found: ID does not exist" containerID="53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.039965 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1"} err="failed to get container status \"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1\": rpc error: code = NotFound desc = could not find container \"53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1\": container with ID starting with 53a3d61f0782f9f1d8cc2cd9f5dea87f88eb86d112d3c0b76164f3596b00efe1 not found: ID does not exist" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.040000 4879 scope.go:117] "RemoveContainer" containerID="967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.040389 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373\": container with ID starting with 967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373 not found: ID does not exist" containerID="967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.040421 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373"} err="failed to get container status \"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373\": rpc error: code = NotFound desc = could not find container \"967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373\": container with ID starting with 967e49df88fa1d9d59857cfb6722bdad320f8c394e8e5a864c32628fc1637373 not found: ID does not exist" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.040437 4879 scope.go:117] "RemoveContainer" containerID="f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.040686 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227\": container with ID starting with f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227 not found: ID does not exist" containerID="f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.040713 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227"} err="failed to get container status \"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227\": rpc error: code = NotFound desc = could not find container \"f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227\": container with ID starting with f193596ebff42148e4165975fdf37595b89b4561583c0eeaa579d5fca0060227 not found: ID does not exist" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.093305 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0c35a74-170c-493d-a037-bc7074836e43-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.174935 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2xzrl"] Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.356578 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.375689 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.398329 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.399584 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmb5k\" (UniqueName: \"kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k\") pod \"908eba86-8426-4692-beb1-c6a2324466f2\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.399859 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs\") pod \"908eba86-8426-4692-beb1-c6a2324466f2\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.400009 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data\") pod \"908eba86-8426-4692-beb1-c6a2324466f2\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.400049 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle\") pod \"908eba86-8426-4692-beb1-c6a2324466f2\" (UID: \"908eba86-8426-4692-beb1-c6a2324466f2\") " Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.400546 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs" (OuterVolumeSpecName: "logs") pod "908eba86-8426-4692-beb1-c6a2324466f2" (UID: "908eba86-8426-4692-beb1-c6a2324466f2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.410376 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k" (OuterVolumeSpecName: "kube-api-access-gmb5k") pod "908eba86-8426-4692-beb1-c6a2324466f2" (UID: "908eba86-8426-4692-beb1-c6a2324466f2"). InnerVolumeSpecName "kube-api-access-gmb5k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.417781 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.418496 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-api" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.418586 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-api" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.418686 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="proxy-httpd" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.418838 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="proxy-httpd" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.418907 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="sg-core" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.418966 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="sg-core" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.419049 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-central-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419108 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-central-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.419236 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-log" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419307 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-log" Nov 25 14:53:21 crc kubenswrapper[4879]: E1125 14:53:21.419386 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-notification-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419446 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-notification-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419751 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-central-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419834 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-log" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419899 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="proxy-httpd" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.419973 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="ceilometer-notification-agent" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.420062 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d0c35a74-170c-493d-a037-bc7074836e43" containerName="sg-core" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.420139 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="908eba86-8426-4692-beb1-c6a2324466f2" containerName="nova-api-api" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.422400 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.430057 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.443410 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.443954 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.455473 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.455831 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "908eba86-8426-4692-beb1-c6a2324466f2" (UID: "908eba86-8426-4692-beb1-c6a2324466f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.473929 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data" (OuterVolumeSpecName: "config-data") pod "908eba86-8426-4692-beb1-c6a2324466f2" (UID: "908eba86-8426-4692-beb1-c6a2324466f2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503719 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503840 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503879 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503904 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503928 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503969 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.503997 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.504048 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.504145 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/908eba86-8426-4692-beb1-c6a2324466f2-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.504164 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-config-data\") on node \"crc\" DevicePath \"\"" 
Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.504180 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908eba86-8426-4692-beb1-c6a2324466f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.504195 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmb5k\" (UniqueName: \"kubernetes.io/projected/908eba86-8426-4692-beb1-c6a2324466f2-kube-api-access-gmb5k\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.606648 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.607764 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608224 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608288 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608304 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608575 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.608639 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.609402 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.610165 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.613335 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.614993 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.615190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.617663 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.617862 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.626605 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") pod \"ceilometer-0\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.665171 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0c35a74-170c-493d-a037-bc7074836e43" path="/var/lib/kubelet/pods/d0c35a74-170c-493d-a037-bc7074836e43/volumes" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.778686 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.928227 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2xzrl" event={"ID":"9620e674-5089-4560-8b81-cf2399d0ae7c","Type":"ContainerStarted","Data":"4b176e25b3d3736f8a1ed0025839a36198bbcdf4c98a15829d74dfff88a6fc04"} Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.929096 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2xzrl" event={"ID":"9620e674-5089-4560-8b81-cf2399d0ae7c","Type":"ContainerStarted","Data":"126ec0d944042a2d20b0482d813ba5feedafb9ae1ad43d90187ed5856940a7d3"} Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.945839 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"908eba86-8426-4692-beb1-c6a2324466f2","Type":"ContainerDied","Data":"b740c66b4bb25869ddac27cdb8b6cedb5922d4671792b285972a54c249f36350"} Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.945921 4879 scope.go:117] "RemoveContainer" containerID="0b9111ec02906e676e584643f219642556ee035362674b3003db27461cf5764b" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.946077 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:21 crc kubenswrapper[4879]: I1125 14:53:21.952782 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-2xzrl" podStartSLOduration=1.9527584089999999 podStartE2EDuration="1.952758409s" podCreationTimestamp="2025-11-25 14:53:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:21.94310207 +0000 UTC m=+1693.546515161" watchObservedRunningTime="2025-11-25 14:53:21.952758409 +0000 UTC m=+1693.556171480" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.004977 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.011459 4879 scope.go:117] "RemoveContainer" containerID="b9cf5bb919cbcc48a7c785effcc107816382503b945bcf102d9aa8e026d06aca" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.031965 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.049455 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.051567 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.055068 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.055323 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.059753 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.065450 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133254 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn6v4\" (UniqueName: \"kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133305 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133330 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133678 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133872 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.133908 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.235862 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.236027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs\") pod 
\"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.237353 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.237391 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.237410 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn6v4\" (UniqueName: \"kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.237437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.237788 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.241277 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.242863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.242906 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.254646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.274628 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn6v4\" (UniqueName: \"kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4\") pod \"nova-api-0\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " 
pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.325720 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.380638 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:22 crc kubenswrapper[4879]: W1125 14:53:22.884269 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1deb337e_b170_438e_88d7_06fd5b3aa0ef.slice/crio-a741defc4de16a17da61e3e0e517b4e56ffea50478c09b2649cea85155e45159 WatchSource:0}: Error finding container a741defc4de16a17da61e3e0e517b4e56ffea50478c09b2649cea85155e45159: Status 404 returned error can't find the container with id a741defc4de16a17da61e3e0e517b4e56ffea50478c09b2649cea85155e45159 Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.885647 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.967785 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerStarted","Data":"a741defc4de16a17da61e3e0e517b4e56ffea50478c09b2649cea85155e45159"} Nov 25 14:53:22 crc kubenswrapper[4879]: I1125 14:53:22.969295 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerStarted","Data":"3c41c6b7425791d59c6df31c8fe655c7055a212627fce7c34e1c381dd3fe0c78"} Nov 25 14:53:23 crc kubenswrapper[4879]: I1125 14:53:23.657036 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="908eba86-8426-4692-beb1-c6a2324466f2" path="/var/lib/kubelet/pods/908eba86-8426-4692-beb1-c6a2324466f2/volumes" Nov 25 14:53:23 crc kubenswrapper[4879]: I1125 14:53:23.982411 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerStarted","Data":"946e3d89d413078837a3446d11985d445eb85d6f25cc051bdb967850d1056a98"} Nov 25 14:53:23 crc kubenswrapper[4879]: I1125 14:53:23.984501 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerStarted","Data":"60a8e28da190fc1ea2f05959f3215af32c53be7afc7e37e5e944525328bcabf5"} Nov 25 14:53:23 crc kubenswrapper[4879]: I1125 14:53:23.984563 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerStarted","Data":"b6f459e33b90283f1eee923c67e0184a19e23d40aa65abf5c8001abaec4de47d"} Nov 25 14:53:24 crc kubenswrapper[4879]: I1125 14:53:24.344115 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:53:24 crc kubenswrapper[4879]: I1125 14:53:24.366615 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.366592757 podStartE2EDuration="3.366592757s" podCreationTimestamp="2025-11-25 14:53:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:24.013389402 +0000 UTC m=+1695.616802493" watchObservedRunningTime="2025-11-25 14:53:24.366592757 +0000 UTC m=+1695.970005828" Nov 25 14:53:24 crc kubenswrapper[4879]: I1125 
14:53:24.422319 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:53:24 crc kubenswrapper[4879]: I1125 14:53:24.422616 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-77frl" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="dnsmasq-dns" containerID="cri-o://14723245bbfb3df2adc0ca36d6f3662562c586c27a3bcdaf7d7283f32dac0cc8" gracePeriod=10 Nov 25 14:53:24 crc kubenswrapper[4879]: I1125 14:53:24.999364 4879 generic.go:334] "Generic (PLEG): container finished" podID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerID="14723245bbfb3df2adc0ca36d6f3662562c586c27a3bcdaf7d7283f32dac0cc8" exitCode=0 Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:24.999457 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-77frl" event={"ID":"2fcb28ab-a9d4-43df-9beb-b6658e91e39c","Type":"ContainerDied","Data":"14723245bbfb3df2adc0ca36d6f3662562c586c27a3bcdaf7d7283f32dac0cc8"} Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.479116 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.757177 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.819221 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.819290 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.819349 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzx27\" (UniqueName: \"kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.819406 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.820880 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.820930 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc\") pod \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\" (UID: \"2fcb28ab-a9d4-43df-9beb-b6658e91e39c\") " Nov 25 14:53:25 crc kubenswrapper[4879]: 
I1125 14:53:25.868003 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27" (OuterVolumeSpecName: "kube-api-access-jzx27") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "kube-api-access-jzx27". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.890719 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.892647 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.903798 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.914207 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.914431 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config" (OuterVolumeSpecName: "config") pod "2fcb28ab-a9d4-43df-9beb-b6658e91e39c" (UID: "2fcb28ab-a9d4-43df-9beb-b6658e91e39c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923225 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923271 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzx27\" (UniqueName: \"kubernetes.io/projected/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-kube-api-access-jzx27\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923284 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923293 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923303 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:25 crc kubenswrapper[4879]: I1125 14:53:25.923313 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2fcb28ab-a9d4-43df-9beb-b6658e91e39c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.011421 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-77frl" event={"ID":"2fcb28ab-a9d4-43df-9beb-b6658e91e39c","Type":"ContainerDied","Data":"0a8354e52d5d58e1f91fdbc26af586129c3f3bc66d633372e3a6261865009b9f"} Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.011488 4879 scope.go:117] "RemoveContainer" containerID="14723245bbfb3df2adc0ca36d6f3662562c586c27a3bcdaf7d7283f32dac0cc8" Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.011647 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-77frl" Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.022890 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerStarted","Data":"0b47fab24bee0e38de52810e8aac9e447edcc833a507ca4dc0bc3be5ed22829d"} Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.050141 4879 scope.go:117] "RemoveContainer" containerID="0a434fb6a19b168824100b0429593a0f740fef032e6170bd52459caab3ec34fe" Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.080292 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:53:26 crc kubenswrapper[4879]: I1125 14:53:26.092422 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-77frl"] Nov 25 14:53:27 crc kubenswrapper[4879]: I1125 14:53:27.671847 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" path="/var/lib/kubelet/pods/2fcb28ab-a9d4-43df-9beb-b6658e91e39c/volumes" Nov 25 14:53:28 crc kubenswrapper[4879]: I1125 14:53:28.045270 4879 generic.go:334] "Generic (PLEG): container finished" podID="9620e674-5089-4560-8b81-cf2399d0ae7c" containerID="4b176e25b3d3736f8a1ed0025839a36198bbcdf4c98a15829d74dfff88a6fc04" exitCode=0 Nov 25 14:53:28 crc kubenswrapper[4879]: I1125 14:53:28.045314 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2xzrl" event={"ID":"9620e674-5089-4560-8b81-cf2399d0ae7c","Type":"ContainerDied","Data":"4b176e25b3d3736f8a1ed0025839a36198bbcdf4c98a15829d74dfff88a6fc04"} Nov 25 14:53:28 crc kubenswrapper[4879]: I1125 14:53:28.646035 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:53:28 crc kubenswrapper[4879]: E1125 14:53:28.646653 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.059543 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerStarted","Data":"10a617444ae16388f07c8ef9802441bb3504b64919ce0e9b060337efd73ed89a"} Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.438069 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.509883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle\") pod \"9620e674-5089-4560-8b81-cf2399d0ae7c\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.509943 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data\") pod \"9620e674-5089-4560-8b81-cf2399d0ae7c\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.509998 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts\") pod \"9620e674-5089-4560-8b81-cf2399d0ae7c\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.510039 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2lmm\" (UniqueName: \"kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm\") pod \"9620e674-5089-4560-8b81-cf2399d0ae7c\" (UID: \"9620e674-5089-4560-8b81-cf2399d0ae7c\") " Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.516418 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm" (OuterVolumeSpecName: "kube-api-access-g2lmm") pod "9620e674-5089-4560-8b81-cf2399d0ae7c" (UID: "9620e674-5089-4560-8b81-cf2399d0ae7c"). InnerVolumeSpecName "kube-api-access-g2lmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.516658 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts" (OuterVolumeSpecName: "scripts") pod "9620e674-5089-4560-8b81-cf2399d0ae7c" (UID: "9620e674-5089-4560-8b81-cf2399d0ae7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.538432 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data" (OuterVolumeSpecName: "config-data") pod "9620e674-5089-4560-8b81-cf2399d0ae7c" (UID: "9620e674-5089-4560-8b81-cf2399d0ae7c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.539744 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9620e674-5089-4560-8b81-cf2399d0ae7c" (UID: "9620e674-5089-4560-8b81-cf2399d0ae7c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.612346 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.612387 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.612396 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9620e674-5089-4560-8b81-cf2399d0ae7c-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:29 crc kubenswrapper[4879]: I1125 14:53:29.612405 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2lmm\" (UniqueName: \"kubernetes.io/projected/9620e674-5089-4560-8b81-cf2399d0ae7c-kube-api-access-g2lmm\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.071230 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2xzrl" event={"ID":"9620e674-5089-4560-8b81-cf2399d0ae7c","Type":"ContainerDied","Data":"126ec0d944042a2d20b0482d813ba5feedafb9ae1ad43d90187ed5856940a7d3"} Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.071273 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="126ec0d944042a2d20b0482d813ba5feedafb9ae1ad43d90187ed5856940a7d3" Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.071326 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2xzrl" Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.247274 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.247561 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-log" containerID="cri-o://b6f459e33b90283f1eee923c67e0184a19e23d40aa65abf5c8001abaec4de47d" gracePeriod=30 Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.248118 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-api" containerID="cri-o://60a8e28da190fc1ea2f05959f3215af32c53be7afc7e37e5e944525328bcabf5" gracePeriod=30 Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.264279 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.264586 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" containerName="nova-scheduler-scheduler" containerID="cri-o://1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433" gracePeriod=30 Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.279882 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.280188 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" 
containerName="nova-metadata-log" containerID="cri-o://6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b" gracePeriod=30 Nov 25 14:53:30 crc kubenswrapper[4879]: I1125 14:53:30.280321 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" containerID="cri-o://7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301" gracePeriod=30 Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.093593 4879 generic.go:334] "Generic (PLEG): container finished" podID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerID="6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b" exitCode=143 Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.093693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerDied","Data":"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b"} Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.098760 4879 generic.go:334] "Generic (PLEG): container finished" podID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerID="60a8e28da190fc1ea2f05959f3215af32c53be7afc7e37e5e944525328bcabf5" exitCode=0 Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.098789 4879 generic.go:334] "Generic (PLEG): container finished" podID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerID="b6f459e33b90283f1eee923c67e0184a19e23d40aa65abf5c8001abaec4de47d" exitCode=143 Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.098810 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerDied","Data":"60a8e28da190fc1ea2f05959f3215af32c53be7afc7e37e5e944525328bcabf5"} Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.098838 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerDied","Data":"b6f459e33b90283f1eee923c67e0184a19e23d40aa65abf5c8001abaec4de47d"} Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.753198 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866014 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866111 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866206 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866246 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866365 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn6v4\" (UniqueName: \"kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866391 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data\") pod \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\" (UID: \"1deb337e-b170-438e-88d7-06fd5b3aa0ef\") " Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.866980 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs" (OuterVolumeSpecName: "logs") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.876394 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4" (OuterVolumeSpecName: "kube-api-access-sn6v4") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "kube-api-access-sn6v4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.892859 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data" (OuterVolumeSpecName: "config-data") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.893076 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.922757 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.925100 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1deb337e-b170-438e-88d7-06fd5b3aa0ef" (UID: "1deb337e-b170-438e-88d7-06fd5b3aa0ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968260 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn6v4\" (UniqueName: \"kubernetes.io/projected/1deb337e-b170-438e-88d7-06fd5b3aa0ef-kube-api-access-sn6v4\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968300 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968316 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968332 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1deb337e-b170-438e-88d7-06fd5b3aa0ef-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968345 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:31 crc kubenswrapper[4879]: I1125 14:53:31.968357 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1deb337e-b170-438e-88d7-06fd5b3aa0ef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.111182 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1deb337e-b170-438e-88d7-06fd5b3aa0ef","Type":"ContainerDied","Data":"a741defc4de16a17da61e3e0e517b4e56ffea50478c09b2649cea85155e45159"} Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.111234 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.111319 4879 scope.go:117] "RemoveContainer" containerID="60a8e28da190fc1ea2f05959f3215af32c53be7afc7e37e5e944525328bcabf5" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.114493 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerStarted","Data":"cbadc53e85256e4bfdbe1b1d3b2ce2bbe19cc469e3ffc3a3cb5d56a035939efe"} Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.114676 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.135520 4879 scope.go:117] "RemoveContainer" containerID="b6f459e33b90283f1eee923c67e0184a19e23d40aa65abf5c8001abaec4de47d" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.159146 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.837172669 podStartE2EDuration="11.159102463s" podCreationTimestamp="2025-11-25 14:53:21 +0000 UTC" firstStartedPulling="2025-11-25 14:53:22.327361337 +0000 UTC m=+1693.930774408" lastFinishedPulling="2025-11-25 14:53:30.649291131 +0000 UTC m=+1702.252704202" observedRunningTime="2025-11-25 14:53:32.135536081 +0000 UTC m=+1703.738949152" watchObservedRunningTime="2025-11-25 14:53:32.159102463 +0000 UTC m=+1703.762515534" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.207208 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.218924 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.229742 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:32 crc kubenswrapper[4879]: E1125 14:53:32.230378 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-log" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230403 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-log" Nov 25 14:53:32 crc kubenswrapper[4879]: E1125 14:53:32.230424 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9620e674-5089-4560-8b81-cf2399d0ae7c" containerName="nova-manage" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230430 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9620e674-5089-4560-8b81-cf2399d0ae7c" containerName="nova-manage" Nov 25 14:53:32 crc kubenswrapper[4879]: E1125 14:53:32.230466 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="init" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230493 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="init" Nov 25 14:53:32 crc kubenswrapper[4879]: E1125 14:53:32.230506 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-api" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230512 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-api" Nov 25 14:53:32 crc kubenswrapper[4879]: E1125 14:53:32.230519 4879 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="dnsmasq-dns" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230524 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="dnsmasq-dns" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230694 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9620e674-5089-4560-8b81-cf2399d0ae7c" containerName="nova-manage" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230707 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-api" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230724 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" containerName="nova-api-log" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.230732 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fcb28ab-a9d4-43df-9beb-b6658e91e39c" containerName="dnsmasq-dns" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.231874 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.234979 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.235351 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.235487 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.241141 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280005 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280099 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280155 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280177 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280245 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-cz2dk\" (UniqueName: \"kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.280318 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.382983 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz2dk\" (UniqueName: \"kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.383137 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.383192 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.383251 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.383295 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.383312 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.384363 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.388364 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.388722 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.389423 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.397452 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.399626 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz2dk\" (UniqueName: \"kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk\") pod \"nova-api-0\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.552265 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.800242 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.893221 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle\") pod \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.893340 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtr78\" (UniqueName: \"kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78\") pod \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.893503 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data\") pod \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\" (UID: \"a9b40c6c-3ff1-4986-9d35-63603cf1af8e\") " Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.897937 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78" (OuterVolumeSpecName: "kube-api-access-gtr78") pod "a9b40c6c-3ff1-4986-9d35-63603cf1af8e" (UID: "a9b40c6c-3ff1-4986-9d35-63603cf1af8e"). InnerVolumeSpecName "kube-api-access-gtr78". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.923559 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data" (OuterVolumeSpecName: "config-data") pod "a9b40c6c-3ff1-4986-9d35-63603cf1af8e" (UID: "a9b40c6c-3ff1-4986-9d35-63603cf1af8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.948418 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9b40c6c-3ff1-4986-9d35-63603cf1af8e" (UID: "a9b40c6c-3ff1-4986-9d35-63603cf1af8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.995252 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.995303 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtr78\" (UniqueName: \"kubernetes.io/projected/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-kube-api-access-gtr78\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:32 crc kubenswrapper[4879]: I1125 14:53:32.995314 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9b40c6c-3ff1-4986-9d35-63603cf1af8e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:33 crc kubenswrapper[4879]: W1125 14:53:33.033323 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4835e527_a539_4cc7_8730_d75f0c5af849.slice/crio-6df21eafbbbf98b51fdbe3a51dbb3b42d81399bab225a48761b673a600b26cd3 WatchSource:0}: Error finding container 6df21eafbbbf98b51fdbe3a51dbb3b42d81399bab225a48761b673a600b26cd3: Status 404 returned error can't find the container with id 6df21eafbbbf98b51fdbe3a51dbb3b42d81399bab225a48761b673a600b26cd3 Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.036948 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.130899 4879 generic.go:334] "Generic (PLEG): container finished" podID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" containerID="1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433" exitCode=0 Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.130958 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9b40c6c-3ff1-4986-9d35-63603cf1af8e","Type":"ContainerDied","Data":"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433"} Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.130986 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9b40c6c-3ff1-4986-9d35-63603cf1af8e","Type":"ContainerDied","Data":"e6f1fae5eb699fac185426515382b93bca6a27041889b295be0e3520d896814b"} Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.131003 4879 scope.go:117] "RemoveContainer" containerID="1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.131091 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.137633 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerStarted","Data":"6df21eafbbbf98b51fdbe3a51dbb3b42d81399bab225a48761b673a600b26cd3"} Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.162363 4879 scope.go:117] "RemoveContainer" containerID="1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433" Nov 25 14:53:33 crc kubenswrapper[4879]: E1125 14:53:33.162845 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433\": container with ID starting with 1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433 not found: ID does not exist" containerID="1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.162879 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433"} err="failed to get container status \"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433\": rpc error: code = NotFound desc = could not find container \"1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433\": container with ID starting with 1c226fcfe4c6dcf1143b155b44bb8699e23b7597b1fd1b2d5d7b74aa59ef2433 not found: ID does not exist" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.183103 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.201461 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.217973 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:33 crc kubenswrapper[4879]: E1125 14:53:33.218514 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" containerName="nova-scheduler-scheduler" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.218531 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" containerName="nova-scheduler-scheduler" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.218762 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" containerName="nova-scheduler-scheduler" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.219569 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.223962 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.226891 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.300788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.301157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpxnf\" (UniqueName: \"kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.301246 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.403987 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpxnf\" (UniqueName: \"kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.404566 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.404712 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.409957 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.416543 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.435364 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpxnf\" (UniqueName: 
\"kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf\") pod \"nova-scheduler-0\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.482720 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:56116->10.217.0.200:8775: read: connection reset by peer" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.482738 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:56112->10.217.0.200:8775: read: connection reset by peer" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.542786 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.687561 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1deb337e-b170-438e-88d7-06fd5b3aa0ef" path="/var/lib/kubelet/pods/1deb337e-b170-438e-88d7-06fd5b3aa0ef/volumes" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.688778 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9b40c6c-3ff1-4986-9d35-63603cf1af8e" path="/var/lib/kubelet/pods/a9b40c6c-3ff1-4986-9d35-63603cf1af8e/volumes" Nov 25 14:53:33 crc kubenswrapper[4879]: I1125 14:53:33.977249 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.020847 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs\") pod \"7299d2c6-54b2-4989-a78d-15d0ec713546\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.021001 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle\") pod \"7299d2c6-54b2-4989-a78d-15d0ec713546\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.021035 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data\") pod \"7299d2c6-54b2-4989-a78d-15d0ec713546\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.021088 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs\") pod \"7299d2c6-54b2-4989-a78d-15d0ec713546\" (UID: \"7299d2c6-54b2-4989-a78d-15d0ec713546\") " Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.021231 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkb2w\" (UniqueName: \"kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w\") pod \"7299d2c6-54b2-4989-a78d-15d0ec713546\" (UID: 
\"7299d2c6-54b2-4989-a78d-15d0ec713546\") " Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.026900 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w" (OuterVolumeSpecName: "kube-api-access-bkb2w") pod "7299d2c6-54b2-4989-a78d-15d0ec713546" (UID: "7299d2c6-54b2-4989-a78d-15d0ec713546"). InnerVolumeSpecName "kube-api-access-bkb2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.027307 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs" (OuterVolumeSpecName: "logs") pod "7299d2c6-54b2-4989-a78d-15d0ec713546" (UID: "7299d2c6-54b2-4989-a78d-15d0ec713546"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.067260 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data" (OuterVolumeSpecName: "config-data") pod "7299d2c6-54b2-4989-a78d-15d0ec713546" (UID: "7299d2c6-54b2-4989-a78d-15d0ec713546"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.080028 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7299d2c6-54b2-4989-a78d-15d0ec713546" (UID: "7299d2c6-54b2-4989-a78d-15d0ec713546"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.087083 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7299d2c6-54b2-4989-a78d-15d0ec713546" (UID: "7299d2c6-54b2-4989-a78d-15d0ec713546"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.123587 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkb2w\" (UniqueName: \"kubernetes.io/projected/7299d2c6-54b2-4989-a78d-15d0ec713546-kube-api-access-bkb2w\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.123973 4879 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.123987 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.124000 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7299d2c6-54b2-4989-a78d-15d0ec713546-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.124012 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7299d2c6-54b2-4989-a78d-15d0ec713546-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:53:34 crc kubenswrapper[4879]: W1125 14:53:34.138979 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d3273dc_a6fa_43b7_8225_7a175f55da77.slice/crio-981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550 WatchSource:0}: Error finding container 981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550: Status 404 returned error can't find the container with id 981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550 Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.144137 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.167298 4879 generic.go:334] "Generic (PLEG): container finished" podID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerID="7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301" exitCode=0 Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.167412 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerDied","Data":"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301"} Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.167441 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7299d2c6-54b2-4989-a78d-15d0ec713546","Type":"ContainerDied","Data":"906e20b47cb04ba1cc98aa0bd5811af2aac253119c8048343095c24bbcdf522c"} Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.167458 4879 scope.go:117] "RemoveContainer" containerID="7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.167620 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.175866 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerStarted","Data":"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed"} Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.175919 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerStarted","Data":"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343"} Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.212463 4879 scope.go:117] "RemoveContainer" containerID="6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.220404 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.220384634 podStartE2EDuration="2.220384634s" podCreationTimestamp="2025-11-25 14:53:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:34.200679535 +0000 UTC m=+1705.804092636" watchObservedRunningTime="2025-11-25 14:53:34.220384634 +0000 UTC m=+1705.823797705" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.235664 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.244889 4879 scope.go:117] "RemoveContainer" containerID="7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301" Nov 25 14:53:34 crc kubenswrapper[4879]: E1125 14:53:34.245421 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301\": container with ID starting with 7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301 not found: ID does not exist" containerID="7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.245503 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301"} err="failed to get container status \"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301\": rpc error: code = NotFound desc = could not find container \"7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301\": container with ID starting with 7056897dadcda45a87e9c211d9ce589ec96ac7bf7d419d3cab72981270399301 not found: ID does not exist" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.245540 4879 scope.go:117] "RemoveContainer" containerID="6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b" Nov 25 14:53:34 crc kubenswrapper[4879]: E1125 14:53:34.246007 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b\": container with ID starting with 6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b not found: ID does not exist" containerID="6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.246158 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b"} err="failed to get container status \"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b\": rpc error: code = NotFound desc = could not find container \"6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b\": container with ID starting with 6daef1f32aa0f54806dcc43a7a5bcdf47dedc46ce2d74af989dabb4b5fa3658b not found: ID does not exist" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.248471 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.262223 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:34 crc kubenswrapper[4879]: E1125 14:53:34.262777 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-log" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.262803 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-log" Nov 25 14:53:34 crc kubenswrapper[4879]: E1125 14:53:34.262843 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.262852 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.263088 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-metadata" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.263144 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" containerName="nova-metadata-log" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.264429 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.267722 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.267966 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.289114 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.328546 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.328631 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.328767 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.329080 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx7s4\" (UniqueName: \"kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.329293 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.431669 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.431744 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx7s4\" (UniqueName: \"kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.431783 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " 
pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.431837 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.431878 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.432434 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.436241 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.437190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.437208 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.450554 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx7s4\" (UniqueName: \"kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4\") pod \"nova-metadata-0\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " pod="openstack/nova-metadata-0" Nov 25 14:53:34 crc kubenswrapper[4879]: I1125 14:53:34.601116 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.061701 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:53:35 crc kubenswrapper[4879]: W1125 14:53:35.079926 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6391ba8e_71b4_44d3_8a99_14ff66c61604.slice/crio-184037fbb26a2a47cbb3bbcd5e3311b5b03d0357c49f299d677581a7515577c0 WatchSource:0}: Error finding container 184037fbb26a2a47cbb3bbcd5e3311b5b03d0357c49f299d677581a7515577c0: Status 404 returned error can't find the container with id 184037fbb26a2a47cbb3bbcd5e3311b5b03d0357c49f299d677581a7515577c0 Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.187026 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerStarted","Data":"184037fbb26a2a47cbb3bbcd5e3311b5b03d0357c49f299d677581a7515577c0"} Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.191001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3273dc-a6fa-43b7-8225-7a175f55da77","Type":"ContainerStarted","Data":"e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65"} Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.191061 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3273dc-a6fa-43b7-8225-7a175f55da77","Type":"ContainerStarted","Data":"981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550"} Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.221094 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.221074002 podStartE2EDuration="2.221074002s" podCreationTimestamp="2025-11-25 14:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:35.209060149 +0000 UTC m=+1706.812473220" watchObservedRunningTime="2025-11-25 14:53:35.221074002 +0000 UTC m=+1706.824487073" Nov 25 14:53:35 crc kubenswrapper[4879]: I1125 14:53:35.661413 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7299d2c6-54b2-4989-a78d-15d0ec713546" path="/var/lib/kubelet/pods/7299d2c6-54b2-4989-a78d-15d0ec713546/volumes" Nov 25 14:53:36 crc kubenswrapper[4879]: I1125 14:53:36.213105 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerStarted","Data":"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17"} Nov 25 14:53:36 crc kubenswrapper[4879]: I1125 14:53:36.213169 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerStarted","Data":"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43"} Nov 25 14:53:36 crc kubenswrapper[4879]: I1125 14:53:36.248364 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.248306331 podStartE2EDuration="2.248306331s" podCreationTimestamp="2025-11-25 14:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:53:36.233367061 +0000 UTC m=+1707.836780142" 
watchObservedRunningTime="2025-11-25 14:53:36.248306331 +0000 UTC m=+1707.851719392" Nov 25 14:53:38 crc kubenswrapper[4879]: I1125 14:53:38.080254 4879 scope.go:117] "RemoveContainer" containerID="c9ce651ae7bd83610ac3da78634fbc805214ed291c9f3883f73abaec3e5353d9" Nov 25 14:53:38 crc kubenswrapper[4879]: I1125 14:53:38.543776 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 14:53:39 crc kubenswrapper[4879]: I1125 14:53:39.601832 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:53:39 crc kubenswrapper[4879]: I1125 14:53:39.601900 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 14:53:42 crc kubenswrapper[4879]: I1125 14:53:42.552761 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:53:42 crc kubenswrapper[4879]: I1125 14:53:42.553147 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 14:53:42 crc kubenswrapper[4879]: I1125 14:53:42.644891 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:53:42 crc kubenswrapper[4879]: E1125 14:53:42.645180 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:53:43 crc kubenswrapper[4879]: I1125 14:53:43.543497 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 14:53:43 crc kubenswrapper[4879]: I1125 14:53:43.566391 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:43 crc kubenswrapper[4879]: I1125 14:53:43.566540 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:43 crc kubenswrapper[4879]: I1125 14:53:43.571102 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 14:53:44 crc kubenswrapper[4879]: I1125 14:53:44.316155 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 14:53:44 crc kubenswrapper[4879]: I1125 14:53:44.602162 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 14:53:44 crc kubenswrapper[4879]: I1125 14:53:44.602223 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 14:53:45 crc kubenswrapper[4879]: I1125 14:53:45.615557 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" 
containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:45 crc kubenswrapper[4879]: I1125 14:53:45.615890 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 25 14:53:51 crc kubenswrapper[4879]: I1125 14:53:51.787398 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 14:53:52 crc kubenswrapper[4879]: I1125 14:53:52.560258 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 14:53:52 crc kubenswrapper[4879]: I1125 14:53:52.560937 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 14:53:52 crc kubenswrapper[4879]: I1125 14:53:52.561023 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 14:53:52 crc kubenswrapper[4879]: I1125 14:53:52.570541 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 14:53:53 crc kubenswrapper[4879]: I1125 14:53:53.378053 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 14:53:53 crc kubenswrapper[4879]: I1125 14:53:53.386180 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 14:53:53 crc kubenswrapper[4879]: I1125 14:53:53.645901 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:53:53 crc kubenswrapper[4879]: E1125 14:53:53.646202 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:53:54 crc kubenswrapper[4879]: I1125 14:53:54.611823 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 14:53:54 crc kubenswrapper[4879]: I1125 14:53:54.612241 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 14:53:54 crc kubenswrapper[4879]: I1125 14:53:54.619268 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 14:53:54 crc kubenswrapper[4879]: I1125 14:53:54.622860 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 14:54:07 crc kubenswrapper[4879]: I1125 14:54:07.645822 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:54:07 crc kubenswrapper[4879]: E1125 14:54:07.646771 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.267204 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.267908 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" containerName="openstackclient" containerID="cri-o://20f8e7a470342784d528dc91bc65940874d5348dbae47cccff1c3f3137264375" gracePeriod=2 Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.292784 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.376504 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.485663 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.485733 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data podName:c1814b22-d1b3-4426-9fa2-f613640f63e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:13.985714898 +0000 UTC m=+1745.589127969 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data") pod "rabbitmq-cell1-server-0" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8") : configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.518934 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.521225 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="openstack-network-exporter" containerID="cri-o://73e8dd61029c94294aeb86b9c378ec7ea7b10d602ce332a8330879bb615f72bb" gracePeriod=300 Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.617179 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement30f0-account-delete-s2wtk"] Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.617696 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" containerName="openstackclient" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.617718 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" containerName="openstackclient" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.617909 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" containerName="openstackclient" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.618596 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.623472 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement30f0-account-delete-s2wtk"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.691273 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="ovsdbserver-nb" containerID="cri-o://59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" gracePeriod=300 Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.739635 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.739884 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" containerID="cri-o://6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" gracePeriod=30 Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.740017 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="openstack-network-exporter" containerID="cri-o://d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd" gracePeriod=30 Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.757257 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.793684 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd594\" (UniqueName: \"kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.795984 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.822022 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.841436 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican97a3-account-delete-gdmqs"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.842763 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.879594 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican97a3-account-delete-gdmqs"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.897073 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.897295 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd594\" (UniqueName: \"kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.898598 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.898655 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.898695 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data podName:ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd nodeName:}" failed. No retries permitted until 2025-11-25 14:54:14.398681635 +0000 UTC m=+1746.002094706 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data") pod "rabbitmq-server-0" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd") : configmap "rabbitmq-config-data" not found Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.903394 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.926229 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:13 crc kubenswrapper[4879]: E1125 14:54:13.926290 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.948756 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance4d84-account-delete-2z9mh"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.959258 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.971909 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd594\" (UniqueName: \"kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594\") pod \"placement30f0-account-delete-s2wtk\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.973076 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zlv8f"] Nov 25 14:54:13 crc kubenswrapper[4879]: I1125 14:54:13.995843 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zlv8f"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.000192 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.000297 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5bq5\" (UniqueName: \"kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.000435 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 25 
14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.000479 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data podName:c1814b22-d1b3-4426-9fa2-f613640f63e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:15.000465663 +0000 UTC m=+1746.603878744 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data") pod "rabbitmq-cell1-server-0" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8") : configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.024843 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance4d84-account-delete-2z9mh"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.105190 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder6256-account-delete-txbhm"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.107291 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.108422 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64w7s\" (UniqueName: \"kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.108509 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.108544 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5bq5\" (UniqueName: \"kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.108654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.109475 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.119243 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder6256-account-delete-txbhm"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.148245 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5bq5\" 
(UniqueName: \"kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5\") pod \"barbican97a3-account-delete-gdmqs\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.148359 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-b29mr"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.185194 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-b29mr"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.214940 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.252840 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.252900 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8z7h\" (UniqueName: \"kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.253112 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.253229 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64w7s\" (UniqueName: \"kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.254428 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.254496 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.297148 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64w7s\" (UniqueName: \"kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s\") pod \"glance4d84-account-delete-2z9mh\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.331207 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.335904 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.355236 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.355539 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8z7h\" (UniqueName: \"kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.356457 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.380243 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.380479 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-q22r5" podUID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" containerName="openstack-network-exporter" containerID="cri-o://dfa8795114b5f0a0a7289db1a6c75a7e18bf770c9753b6f30a6cc638d3462c39" gracePeriod=30 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.399771 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8z7h\" (UniqueName: \"kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h\") pod \"cinder6256-account-delete-txbhm\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.413823 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.429875 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.438370 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.457466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.459850 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9vc2\" (UniqueName: \"kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.460372 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 25 14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.460521 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data podName:ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd nodeName:}" failed. No retries permitted until 2025-11-25 14:54:15.460500542 +0000 UTC m=+1747.063913613 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data") pod "rabbitmq-server-0" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd") : configmap "rabbitmq-config-data" not found Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.486667 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.488143 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.525452 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.574311 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.592645 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.642678 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.642820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.642867 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vbbl\" (UniqueName: \"kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.642982 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9vc2\" (UniqueName: \"kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.645283 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.699412 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9vc2\" (UniqueName: \"kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2\") pod \"neutronb847-account-delete-qvsn6\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.763443 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.763579 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vbbl\" (UniqueName: \"kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc 
kubenswrapper[4879]: I1125 14:54:14.769947 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.773377 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-q22r5_1d972e68-542e-456d-9b40-5cf7aa4b68c7/openstack-network-exporter/0.log" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.773419 4879 generic.go:334] "Generic (PLEG): container finished" podID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" containerID="dfa8795114b5f0a0a7289db1a6c75a7e18bf770c9753b6f30a6cc638d3462c39" exitCode=2 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.773485 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-q22r5" event={"ID":"1d972e68-542e-456d-9b40-5cf7aa4b68c7","Type":"ContainerDied","Data":"dfa8795114b5f0a0a7289db1a6c75a7e18bf770c9753b6f30a6cc638d3462c39"} Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.791162 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vbbl\" (UniqueName: \"kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl\") pod \"novacell0c4f1-account-delete-cfzvs\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.793534 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.795718 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.844364 4879 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-gml5w" message="Exiting ovn-controller (1) " Nov 25 14:54:14 crc kubenswrapper[4879]: E1125 14:54:14.844428 4879 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-gml5w" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" containerID="cri-o://f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.844467 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-gml5w" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" containerID="cri-o://f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2" gracePeriod=30 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.844873 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.855840 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f4987514-2183-451f-98a0-3942895acd0f/ovsdbserver-nb/0.log" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.855896 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4987514-2183-451f-98a0-3942895acd0f" containerID="73e8dd61029c94294aeb86b9c378ec7ea7b10d602ce332a8330879bb615f72bb" exitCode=2 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.855915 4879 generic.go:334] "Generic (PLEG): container finished" podID="f4987514-2183-451f-98a0-3942895acd0f" containerID="59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" exitCode=143 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.855983 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerDied","Data":"73e8dd61029c94294aeb86b9c378ec7ea7b10d602ce332a8330879bb615f72bb"} Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.856019 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerDied","Data":"59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc"} Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.884111 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-7s5gb"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.896087 4879 generic.go:334] "Generic (PLEG): container finished" podID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerID="d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd" exitCode=2 Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.896231 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerDied","Data":"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd"} Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.937197 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-db-sync-7s5gb"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.938804 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.947885 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-bstpg"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.969132 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx2kk\" (UniqueName: \"kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.969236 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.975884 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-bstpg"] Nov 25 14:54:14 crc kubenswrapper[4879]: I1125 14:54:14.987440 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.003613 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.003876 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="dnsmasq-dns" containerID="cri-o://64210090c0d1d50d340e81ed21d95f311647c849f5851a5b2d1cf4494b8483ae" gracePeriod=10 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.072479 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-78w72"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.077769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx2kk\" (UniqueName: \"kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.078180 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.078703 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.078759 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data podName:c1814b22-d1b3-4426-9fa2-f613640f63e8 nodeName:}" failed. 
No retries permitted until 2025-11-25 14:54:17.07874193 +0000 UTC m=+1748.682155001 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data") pod "rabbitmq-cell1-server-0" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8") : configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.080606 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.100907 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-78w72"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.113997 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.114769 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="openstack-network-exporter" containerID="cri-o://350d4e7b8e9d631147fe9a755af14db63aa3e7ee234ef7375cd6205d968d2327" gracePeriod=300 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.114933 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx2kk\" (UniqueName: \"kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk\") pod \"novaapi40fc-account-delete-2nm97\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.125036 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-94fd8ccd4-vp796"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.148347 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-94fd8ccd4-vp796" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-log" containerID="cri-o://1b6bde64e67dfaaeb2ea7d1af1b7e358481fefd8e638d1c70034361c14cd5032" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.148806 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-94fd8ccd4-vp796" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-api" containerID="cri-o://9c3fd5bfe1c4a686adda7380350f15818f581370513fbd5cb85f18e67e6f730e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.149275 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-cjhrw"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.159950 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-cjhrw"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.242424 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.243511 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-server" containerID="cri-o://01d6163416959123eea9546db5e997dc58a1c8fb48cc8df296356f31a71cb2f2" gracePeriod=30 
Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.243981 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="swift-recon-cron" containerID="cri-o://1a3386a1d224fe22edaf0215700f7f28f92829314b7558a91a246f504e5ef884" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244047 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="rsync" containerID="cri-o://cd5d7ea0e9867e8b0fe2167eed20836d54166e4956798392a0fa624050ba2841" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244090 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-expirer" containerID="cri-o://d24b61816a0df221f5cca68298192d65a0db4ffd65d2f4a3f373892fc2581637" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244156 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-updater" containerID="cri-o://9aa6cecc8f842351e35fda9d74c697191565ff3b682681037963b8761a8ddb66" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244198 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-auditor" containerID="cri-o://44f30f9bf7a2177883d8a6f1b7f870687899a5c37317a986aa4bf85dbf743403" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244239 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-replicator" containerID="cri-o://49bdf74c01b8d49e4758eb2d1f183fd8edba4323eaf30b7d8764ca72f601ca8e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244299 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-server" containerID="cri-o://5c9cb1ae5da818f9561819c0427fef18049f551233eaa6475166bbfe4e96a29e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244343 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-updater" containerID="cri-o://78e26b5f462da8135c185dc405d7bb3e40a86f3a0a756b228397651aec46fca7" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244383 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-auditor" containerID="cri-o://78e8c927e3ebbf38f18921232f416853c52caaa147f9bd6e0b42cb5c79ac392f" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244435 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-replicator" containerID="cri-o://0e0569b112382ff911d6d5ffb10cba08cccdc02f2dc893c1ebc01b9c2863ce6b" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 
14:54:15.244475 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-server" containerID="cri-o://f2156b4d210c38de5222f44394d8ff73450ee1af32aec0303948ee68935f943c" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244516 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-reaper" containerID="cri-o://0f3579270e9a0136c7f68a5c3e04c11ba1d26b44c0c1ceb3b31b4cbca3cf4ba7" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244562 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-auditor" containerID="cri-o://eef1d095ea350449cd4e4c13b9b72afe1590ad3b549d49103ddfa4d450adeab0" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.244617 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-replicator" containerID="cri-o://20f98654045b89872f5df2f364f5b15e9060829a694cc0678d30a79c4ecdb272" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.250200 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-w29sx"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.305284 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-w29sx"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.416091 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.416632 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-2xzrl"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.499191 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-2xzrl"] Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.514253 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.514333 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data podName:ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd nodeName:}" failed. No retries permitted until 2025-11-25 14:54:17.514314693 +0000 UTC m=+1749.117727764 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data") pod "rabbitmq-server-0" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd") : configmap "rabbitmq-config-data" not found Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.559878 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.560147 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-log" containerID="cri-o://05364380d2e63d55c39d597c799b96575c6c47041dc20bb528f86650c9382e54" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.560488 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-httpd" containerID="cri-o://41b342be3eecc98813aaf9b468892bd643aa0e34ac73a85cd3d396c5e9f92e0a" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.567401 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.567709 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="cinder-scheduler" containerID="cri-o://99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.568179 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="probe" containerID="cri-o://6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.592557 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.595267 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api-log" containerID="cri-o://bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.595897 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api" containerID="cri-o://ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.639101 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.639379 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-log" containerID="cri-o://af243b8f2bb1e9dcb67147292116b8c6f0d542da633f05a48c0fdb0824119183" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.639955 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" 
podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-httpd" containerID="cri-o://04433574a192b4e76f4a1047133e9f8463a81e6a8e9269e4a2083629111afa0d" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.771721 4879 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 25 14:54:15 crc kubenswrapper[4879]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 25 14:54:15 crc kubenswrapper[4879]: + source /usr/local/bin/container-scripts/functions Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNBridge=br-int Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNRemote=tcp:localhost:6642 Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNEncapType=geneve Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNAvailabilityZones= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ EnableChassisAsGateway=true Nov 25 14:54:15 crc kubenswrapper[4879]: ++ PhysicalNetworks= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNHostName= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 25 14:54:15 crc kubenswrapper[4879]: ++ ovs_dir=/var/lib/openvswitch Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 25 14:54:15 crc kubenswrapper[4879]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + cleanup_ovsdb_server_semaphore Nov 25 14:54:15 crc kubenswrapper[4879]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 25 14:54:15 crc kubenswrapper[4879]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-2n7ff" message=< Nov 25 14:54:15 crc kubenswrapper[4879]: Exiting ovsdb-server (5) [ OK ] Nov 25 14:54:15 crc kubenswrapper[4879]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 25 14:54:15 crc kubenswrapper[4879]: + source /usr/local/bin/container-scripts/functions Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNBridge=br-int Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNRemote=tcp:localhost:6642 Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNEncapType=geneve Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNAvailabilityZones= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ EnableChassisAsGateway=true Nov 25 14:54:15 crc kubenswrapper[4879]: ++ PhysicalNetworks= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNHostName= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 25 14:54:15 crc kubenswrapper[4879]: ++ ovs_dir=/var/lib/openvswitch Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 25 14:54:15 crc kubenswrapper[4879]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + cleanup_ovsdb_server_semaphore Nov 25 14:54:15 crc kubenswrapper[4879]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 25 14:54:15 crc kubenswrapper[4879]: > Nov 25 14:54:15 crc kubenswrapper[4879]: E1125 14:54:15.771768 4879 kuberuntime_container.go:691] "PreStop hook failed" err=< Nov 25 14:54:15 crc kubenswrapper[4879]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 25 14:54:15 crc kubenswrapper[4879]: + source /usr/local/bin/container-scripts/functions Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNBridge=br-int Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNRemote=tcp:localhost:6642 Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNEncapType=geneve Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNAvailabilityZones= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ EnableChassisAsGateway=true Nov 25 14:54:15 crc kubenswrapper[4879]: ++ PhysicalNetworks= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ OVNHostName= Nov 25 14:54:15 crc kubenswrapper[4879]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 25 14:54:15 crc kubenswrapper[4879]: ++ ovs_dir=/var/lib/openvswitch Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 25 14:54:15 crc kubenswrapper[4879]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 25 14:54:15 crc kubenswrapper[4879]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + sleep 0.5 Nov 25 14:54:15 crc kubenswrapper[4879]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 25 14:54:15 crc kubenswrapper[4879]: + cleanup_ovsdb_server_semaphore Nov 25 14:54:15 crc kubenswrapper[4879]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 25 14:54:15 crc kubenswrapper[4879]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 25 14:54:15 crc kubenswrapper[4879]: > pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" containerID="cri-o://092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.771815 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" containerID="cri-o://092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" gracePeriod=29 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.833359 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="ovsdbserver-sb" containerID="cri-o://4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" gracePeriod=300 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.837072 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" containerID="cri-o://8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" gracePeriod=29 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.910052 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284aa011-0c93-49d5-a07e-4580b44f1cdc" path="/var/lib/kubelet/pods/284aa011-0c93-49d5-a07e-4580b44f1cdc/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.911704 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41a3bd8d-a2e0-401e-b2f6-10f076e3710e" path="/var/lib/kubelet/pods/41a3bd8d-a2e0-401e-b2f6-10f076e3710e/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.913076 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="480a7b19-c9e0-41c2-b4cd-af083572f083" path="/var/lib/kubelet/pods/480a7b19-c9e0-41c2-b4cd-af083572f083/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.914860 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64f062b9-8008-483b-b61c-07c621b06e67" path="/var/lib/kubelet/pods/64f062b9-8008-483b-b61c-07c621b06e67/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.915696 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9620e674-5089-4560-8b81-cf2399d0ae7c" path="/var/lib/kubelet/pods/9620e674-5089-4560-8b81-cf2399d0ae7c/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.916400 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b010f021-2ab3-424f-910a-68f969e93561" path="/var/lib/kubelet/pods/b010f021-2ab3-424f-910a-68f969e93561/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.919418 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e" path="/var/lib/kubelet/pods/b05c6fcd-cd96-4ace-b9e0-a2b63e3c644e/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.920178 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ed169ce2-81b3-4579-8f37-f45052a7b15d" path="/var/lib/kubelet/pods/ed169ce2-81b3-4579-8f37-f45052a7b15d/volumes" Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921015 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921064 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921083 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921096 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921144 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921161 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921177 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921192 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921219 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921233 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pgffl"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921248 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-c293-account-create-nmhlw"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921264 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921291 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-c293-account-create-nmhlw"] Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.921604 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-6f68cc547f-bvplz" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker-log" containerID="cri-o://a80540e1f8d0795a7d0f08a859ed3eccf014e5be89d1ab61902666a3115d369e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.922144 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6c8985d949-st5vd" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-httpd" containerID="cri-o://3f48970c185dfaf1d95c8ae3db2de1e8b88ce02bf6cadd6e87e1ea0d966ae78b" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.922872 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6c8985d949-st5vd" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-server" containerID="cri-o://1f411d7aa7397b9a94d385fe775d63a5aaf0a0e6eba3463194e645675a3ae6cf" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.923435 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" 
containerName="nova-metadata-log" containerID="cri-o://3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.923584 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" containerID="cri-o://bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.923654 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-6f68cc547f-bvplz" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker" containerID="cri-o://8bab8985c7010c6b6dd0f0feb2d6a0fbbc3be6f8716197ac4c3a366eb018ce99" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.925259 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-66d45fd54f-j9t95" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-api" containerID="cri-o://28f1ee62e8f0217666143217990ac5ddfaf70ba0a32edec6f7c649da3cf673b3" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.925448 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-api" containerID="cri-o://af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.925671 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-66d45fd54f-j9t95" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-httpd" containerID="cri-o://05f0aa550451ea4ec5584e060eeaaaa5e3700f2bd21f70929ff15b059676284f" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.926011 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api-log" containerID="cri-o://0085d2813cd8a19fc5a40a396d0d21eeddfa493fb910f17688db09fd0fd10a5e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.926230 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api" containerID="cri-o://a140b33b646cb8bebf32e5c01b72ae0d72d960de2557e0cf5cf92d5509342139" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.926385 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-log" containerID="cri-o://beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.926596 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener-log" containerID="cri-o://4bbe35807a1a8ba04bc01121bc68265add08a5e4d443ccd4ad29d5ac1402a13e" gracePeriod=30 Nov 25 14:54:15 crc kubenswrapper[4879]: I1125 14:54:15.926741 4879 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener" containerID="cri-o://dd9719475c4335a86036dc249bfd8e7b5fc7dc1a22a25d830de6783cc81e7d70" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.016388 4879 generic.go:334] "Generic (PLEG): container finished" podID="e81331ae-5592-4d18-8116-ef1ef9520145" containerID="350d4e7b8e9d631147fe9a755af14db63aa3e7ee234ef7375cd6205d968d2327" exitCode=2 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.016428 4879 generic.go:334] "Generic (PLEG): container finished" podID="e81331ae-5592-4d18-8116-ef1ef9520145" containerID="4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.016471 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerDied","Data":"350d4e7b8e9d631147fe9a755af14db63aa3e7ee234ef7375cd6205d968d2327"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.016505 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerDied","Data":"4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.020815 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.021034 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://af2731b4880c2c108e4f64495391dcd175a79df8df0d6cf81c3c2064f645d782" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.038439 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pgffl"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.107917 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.108175 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" containerID="cri-o://384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.112116 4879 generic.go:334] "Generic (PLEG): container finished" podID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerID="05364380d2e63d55c39d597c799b96575c6c47041dc20bb528f86650c9382e54" exitCode=143 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.112234 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerDied","Data":"05364380d2e63d55c39d597c799b96575c6c47041dc20bb528f86650c9382e54"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.132910 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-h2tdk"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.140877 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc is running failed: container process not found" containerID="59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.147718 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc is running failed: container process not found" containerID="59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.148073 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc is running failed: container process not found" containerID="59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.148131 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="ovsdbserver-nb" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.155094 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-h2tdk"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.172084 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.172356 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerName="nova-cell1-conductor-conductor" containerID="cri-o://de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.174059 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qncf"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.188428 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6qncf"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.192156 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.196467 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerName="nova-scheduler-scheduler" containerID="cri-o://e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.287869 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 
14:54:16.301514 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.312419 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:16 crc kubenswrapper[4879]: E1125 14:54:16.312498 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.503469 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f4987514-2183-451f-98a0-3942895acd0f/ovsdbserver-nb/0.log" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.503660 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f4987514-2183-451f-98a0-3942895acd0f","Type":"ContainerDied","Data":"1403afd49d3f224b05842344fa705db2dabb2e32583b114ad65cecaab8f2f384"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.503690 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1403afd49d3f224b05842344fa705db2dabb2e32583b114ad65cecaab8f2f384" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.507179 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="rabbitmq" containerID="cri-o://95617f394d3d7080ab4131a8c7c59dc4d2711ba5f40712fcaa511fe4dd6ad911" gracePeriod=604800 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.509407 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="rabbitmq" containerID="cri-o://9d10b0eee6c5fd073f93c286eefe0e8c01139b9655565d8692ef2eddad0fce94" gracePeriod=604800 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.514196 4879 generic.go:334] "Generic (PLEG): container finished" podID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerID="f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.514310 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w" event={"ID":"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff","Type":"ContainerDied","Data":"f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.532049 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-q22r5_1d972e68-542e-456d-9b40-5cf7aa4b68c7/openstack-network-exporter/0.log" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.532235 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-q22r5" 
event={"ID":"1d972e68-542e-456d-9b40-5cf7aa4b68c7","Type":"ContainerDied","Data":"53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.532268 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53a7ae79e2edc5c0900af3439c37d0c45f9356ae834d1f876f1849d3b1824fce" Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664407 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="d24b61816a0df221f5cca68298192d65a0db4ffd65d2f4a3f373892fc2581637" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664688 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="9aa6cecc8f842351e35fda9d74c697191565ff3b682681037963b8761a8ddb66" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664696 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="44f30f9bf7a2177883d8a6f1b7f870687899a5c37317a986aa4bf85dbf743403" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664718 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="49bdf74c01b8d49e4758eb2d1f183fd8edba4323eaf30b7d8764ca72f601ca8e" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664724 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="5c9cb1ae5da818f9561819c0427fef18049f551233eaa6475166bbfe4e96a29e" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664732 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="78e26b5f462da8135c185dc405d7bb3e40a86f3a0a756b228397651aec46fca7" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664739 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="78e8c927e3ebbf38f18921232f416853c52caaa147f9bd6e0b42cb5c79ac392f" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664745 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="0e0569b112382ff911d6d5ffb10cba08cccdc02f2dc893c1ebc01b9c2863ce6b" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664752 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="0f3579270e9a0136c7f68a5c3e04c11ba1d26b44c0c1ceb3b31b4cbca3cf4ba7" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664758 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="eef1d095ea350449cd4e4c13b9b72afe1590ad3b549d49103ddfa4d450adeab0" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664765 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="20f98654045b89872f5df2f364f5b15e9060829a694cc0678d30a79c4ecdb272" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664813 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"d24b61816a0df221f5cca68298192d65a0db4ffd65d2f4a3f373892fc2581637"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664839 4879 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"9aa6cecc8f842351e35fda9d74c697191565ff3b682681037963b8761a8ddb66"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"44f30f9bf7a2177883d8a6f1b7f870687899a5c37317a986aa4bf85dbf743403"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664859 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"49bdf74c01b8d49e4758eb2d1f183fd8edba4323eaf30b7d8764ca72f601ca8e"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664867 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"5c9cb1ae5da818f9561819c0427fef18049f551233eaa6475166bbfe4e96a29e"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664876 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"78e26b5f462da8135c185dc405d7bb3e40a86f3a0a756b228397651aec46fca7"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664884 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"78e8c927e3ebbf38f18921232f416853c52caaa147f9bd6e0b42cb5c79ac392f"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664892 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"0e0569b112382ff911d6d5ffb10cba08cccdc02f2dc893c1ebc01b9c2863ce6b"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664900 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"0f3579270e9a0136c7f68a5c3e04c11ba1d26b44c0c1ceb3b31b4cbca3cf4ba7"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664908 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"eef1d095ea350449cd4e4c13b9b72afe1590ad3b549d49103ddfa4d450adeab0"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.664916 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"20f98654045b89872f5df2f364f5b15e9060829a694cc0678d30a79c4ecdb272"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.674902 4879 generic.go:334] "Generic (PLEG): container finished" podID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" containerID="20f8e7a470342784d528dc91bc65940874d5348dbae47cccff1c3f3137264375" exitCode=137 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.707777 4879 generic.go:334] "Generic (PLEG): container finished" podID="1cc2c187-456f-439a-a4b2-33dda7946308" containerID="af243b8f2bb1e9dcb67147292116b8c6f0d542da633f05a48c0fdb0824119183" exitCode=143 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.707896 4879 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerDied","Data":"af243b8f2bb1e9dcb67147292116b8c6f0d542da633f05a48c0fdb0824119183"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.746616 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement30f0-account-delete-s2wtk"] Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.773347 4879 generic.go:334] "Generic (PLEG): container finished" podID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerID="1b6bde64e67dfaaeb2ea7d1af1b7e358481fefd8e638d1c70034361c14cd5032" exitCode=143 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.773424 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerDied","Data":"1b6bde64e67dfaaeb2ea7d1af1b7e358481fefd8e638d1c70034361c14cd5032"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.816271 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="galera" containerID="cri-o://3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" gracePeriod=30 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.841608 4879 generic.go:334] "Generic (PLEG): container finished" podID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerID="bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086" exitCode=143 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.841777 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerDied","Data":"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086"} Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.870009 4879 generic.go:334] "Generic (PLEG): container finished" podID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerID="64210090c0d1d50d340e81ed21d95f311647c849f5851a5b2d1cf4494b8483ae" exitCode=0 Nov 25 14:54:16 crc kubenswrapper[4879]: I1125 14:54:16.870136 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" event={"ID":"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc","Type":"ContainerDied","Data":"64210090c0d1d50d340e81ed21d95f311647c849f5851a5b2d1cf4494b8483ae"} Nov 25 14:54:17 crc kubenswrapper[4879]: E1125 14:54:17.106152 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:17 crc kubenswrapper[4879]: E1125 14:54:17.106256 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data podName:c1814b22-d1b3-4426-9fa2-f613640f63e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:21.106237986 +0000 UTC m=+1752.709651057 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data") pod "rabbitmq-cell1-server-0" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8") : configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.150981 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder6256-account-delete-txbhm"] Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.161108 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance4d84-account-delete-2z9mh"] Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.199777 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican97a3-account-delete-gdmqs"] Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.320843 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f4987514-2183-451f-98a0-3942895acd0f/ovsdbserver-nb/0.log" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.320935 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.358452 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-q22r5_1d972e68-542e-456d-9b40-5cf7aa4b68c7/openstack-network-exporter/0.log" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.358536 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412531 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412596 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412657 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412776 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412796 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412841 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlphc\" (UniqueName: 
\"kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412913 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.412946 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config\") pod \"f4987514-2183-451f-98a0-3942895acd0f\" (UID: \"f4987514-2183-451f-98a0-3942895acd0f\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.414653 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts" (OuterVolumeSpecName: "scripts") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.415095 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.415632 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config" (OuterVolumeSpecName: "config") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.415659 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f4987514-2183-451f-98a0-3942895acd0f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.415727 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.424264 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.429360 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc" (OuterVolumeSpecName: "kube-api-access-tlphc") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "kube-api-access-tlphc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.432702 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-gml5w" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.443473 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517155 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517248 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517270 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle\") pod \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517396 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517421 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config\") pod \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517482 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517514 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hklf2\" (UniqueName: \"kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2\") pod \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517582 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517622 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: 
\"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517652 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517673 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret\") pod \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\" (UID: \"e86acc2c-393a-40eb-b8bb-1cda1ef3c298\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517700 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517740 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.517764 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518247 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqbdb\" (UniqueName: \"kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518314 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87f6g\" (UniqueName: \"kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g\") pod \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\" (UID: \"1d972e68-542e-456d-9b40-5cf7aa4b68c7\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518351 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn\") pod \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\" (UID: \"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518929 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518951 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlphc\" (UniqueName: \"kubernetes.io/projected/f4987514-2183-451f-98a0-3942895acd0f-kube-api-access-tlphc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.518966 4879 reconciler_common.go:293] 
"Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4987514-2183-451f-98a0-3942895acd0f-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.519700 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.520046 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run" (OuterVolumeSpecName: "var-run") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.520198 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: E1125 14:54:17.521062 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 25 14:54:17 crc kubenswrapper[4879]: E1125 14:54:17.521620 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data podName:ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd nodeName:}" failed. No retries permitted until 2025-11-25 14:54:21.521580286 +0000 UTC m=+1753.124993557 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data") pod "rabbitmq-server-0" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd") : configmap "rabbitmq-config-data" not found Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.523794 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config" (OuterVolumeSpecName: "config") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.523838 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.523803 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "ovs-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.524532 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts" (OuterVolumeSpecName: "scripts") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.540419 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2" (OuterVolumeSpecName: "kube-api-access-hklf2") pod "e86acc2c-393a-40eb-b8bb-1cda1ef3c298" (UID: "e86acc2c-393a-40eb-b8bb-1cda1ef3c298"). InnerVolumeSpecName "kube-api-access-hklf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.565707 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.571737 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.578915 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb" (OuterVolumeSpecName: "kube-api-access-bqbdb") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "kube-api-access-bqbdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.589892 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.591666 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g" (OuterVolumeSpecName: "kube-api-access-87f6g") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "kube-api-access-87f6g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621731 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621765 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hklf2\" (UniqueName: \"kubernetes.io/projected/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-kube-api-access-hklf2\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621778 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621787 4879 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1d972e68-542e-456d-9b40-5cf7aa4b68c7-ovs-rundir\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621796 4879 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621806 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqbdb\" (UniqueName: \"kubernetes.io/projected/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-kube-api-access-bqbdb\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621814 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87f6g\" (UniqueName: \"kubernetes.io/projected/1d972e68-542e-456d-9b40-5cf7aa4b68c7-kube-api-access-87f6g\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621823 4879 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621831 4879 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-var-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.621839 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d972e68-542e-456d-9b40-5cf7aa4b68c7-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.689894 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0199070f-75cd-4d1b-975f-e3b655a975d1" path="/var/lib/kubelet/pods/0199070f-75cd-4d1b-975f-e3b655a975d1/volumes" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.691432 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20c70c15-43aa-44ee-8a6c-ae31b460ac5e" path="/var/lib/kubelet/pods/20c70c15-43aa-44ee-8a6c-ae31b460ac5e/volumes" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.692690 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d83638d7-7015-4b89-b959-13bd8c563ad4" path="/var/lib/kubelet/pods/d83638d7-7015-4b89-b959-13bd8c563ad4/volumes" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.697076 4879 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="dc3c22fd-7723-4f81-af93-2cf3a150cd08" path="/var/lib/kubelet/pods/dc3c22fd-7723-4f81-af93-2cf3a150cd08/volumes" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723329 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723426 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723507 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-442dw\" (UniqueName: \"kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723631 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723664 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.723712 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0\") pod \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\" (UID: \"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc\") " Nov 25 14:54:17 crc kubenswrapper[4879]: W1125 14:54:17.732245 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5944a1b_2616_48bd_9695_32641324e1c2.slice/crio-ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f WatchSource:0}: Error finding container ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f: Status 404 returned error can't find the container with id ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.802929 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw" (OuterVolumeSpecName: "kube-api-access-442dw") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "kube-api-access-442dw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.826499 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-442dw\" (UniqueName: \"kubernetes.io/projected/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-kube-api-access-442dw\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.920113 4879 generic.go:334] "Generic (PLEG): container finished" podID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerID="0085d2813cd8a19fc5a40a396d0d21eeddfa493fb910f17688db09fd0fd10a5e" exitCode=143 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.920259 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerDied","Data":"0085d2813cd8a19fc5a40a396d0d21eeddfa493fb910f17688db09fd0fd10a5e"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.922431 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican97a3-account-delete-gdmqs" event={"ID":"ea2d8cdd-6c47-4cf7-b336-933762d2c445","Type":"ContainerStarted","Data":"97a711b8e400e1ab873d99420bd77255f267f1d2a213a272fd6a0c7f4dad5819"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.936324 4879 generic.go:334] "Generic (PLEG): container finished" podID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerID="3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43" exitCode=143 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.936417 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerDied","Data":"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.942837 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement30f0-account-delete-s2wtk" event={"ID":"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc","Type":"ContainerStarted","Data":"bd0fcabe281def1397d420f5b8f5e69ab61657dc43fdc6e4701f238ec1d81dcc"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.951406 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance4d84-account-delete-2z9mh" event={"ID":"ca53b222-8018-4445-aa86-5401dbc847b5","Type":"ContainerStarted","Data":"36424a820326e2bcb9b581c795d4ca353c4c72c54677e8a6f4d43be34d1f7512"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.951450 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance4d84-account-delete-2z9mh" event={"ID":"ca53b222-8018-4445-aa86-5401dbc847b5","Type":"ContainerStarted","Data":"8a6d9543a82b0824291a50ceb6499a7f01849775bd41debd2a6f8ce5672d7c37"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.958522 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" event={"ID":"a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc","Type":"ContainerDied","Data":"1f3f161b98d3430ddaf732a09247da2c3322b54ab8ef7ffbfd401643b99f026e"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.958668 4879 scope.go:117] "RemoveContainer" containerID="64210090c0d1d50d340e81ed21d95f311647c849f5851a5b2d1cf4494b8483ae" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.958826 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-zs5xv" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.961742 4879 generic.go:334] "Generic (PLEG): container finished" podID="4835e527-a539-4cc7-8730-d75f0c5af849" containerID="beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343" exitCode=143 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.961796 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerDied","Data":"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965409 4879 generic.go:334] "Generic (PLEG): container finished" podID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerID="dd9719475c4335a86036dc249bfd8e7b5fc7dc1a22a25d830de6783cc81e7d70" exitCode=0 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965439 4879 generic.go:334] "Generic (PLEG): container finished" podID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerID="4bbe35807a1a8ba04bc01121bc68265add08a5e4d443ccd4ad29d5ac1402a13e" exitCode=143 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerDied","Data":"dd9719475c4335a86036dc249bfd8e7b5fc7dc1a22a25d830de6783cc81e7d70"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965545 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerDied","Data":"4bbe35807a1a8ba04bc01121bc68265add08a5e4d443ccd4ad29d5ac1402a13e"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965555 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" event={"ID":"6b33fd48-a5ae-4916-93f5-0675f1cc8bca","Type":"ContainerDied","Data":"49e7eb55685f9b3286f992abdfc27d431d9314de3f332b4da17a6cf3f8a720c6"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.965566 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49e7eb55685f9b3286f992abdfc27d431d9314de3f332b4da17a6cf3f8a720c6" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.975631 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement30f0-account-delete-s2wtk" podStartSLOduration=4.975607625 podStartE2EDuration="4.975607625s" podCreationTimestamp="2025-11-25 14:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:54:17.969341476 +0000 UTC m=+1749.572754547" watchObservedRunningTime="2025-11-25 14:54:17.975607625 +0000 UTC m=+1749.579020706" Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.995595 4879 generic.go:334] "Generic (PLEG): container finished" podID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerID="1f411d7aa7397b9a94d385fe775d63a5aaf0a0e6eba3463194e645675a3ae6cf" exitCode=0 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.995632 4879 generic.go:334] "Generic (PLEG): container finished" podID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerID="3f48970c185dfaf1d95c8ae3db2de1e8b88ce02bf6cadd6e87e1ea0d966ae78b" exitCode=0 Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.995713 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerDied","Data":"1f411d7aa7397b9a94d385fe775d63a5aaf0a0e6eba3463194e645675a3ae6cf"} Nov 25 14:54:17 crc kubenswrapper[4879]: I1125 14:54:17.995740 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerDied","Data":"3f48970c185dfaf1d95c8ae3db2de1e8b88ce02bf6cadd6e87e1ea0d966ae78b"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.000862 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance4d84-account-delete-2z9mh" podStartSLOduration=5.000839801 podStartE2EDuration="5.000839801s" podCreationTimestamp="2025-11-25 14:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:54:17.991776638 +0000 UTC m=+1749.595189709" watchObservedRunningTime="2025-11-25 14:54:18.000839801 +0000 UTC m=+1749.604252882" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.051576 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.074678 4879 generic.go:334] "Generic (PLEG): container finished" podID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerID="05f0aa550451ea4ec5584e060eeaaaa5e3700f2bd21f70929ff15b059676284f" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.074852 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerDied","Data":"05f0aa550451ea4ec5584e060eeaaaa5e3700f2bd21f70929ff15b059676284f"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.093081 4879 generic.go:334] "Generic (PLEG): container finished" podID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.093174 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerDied","Data":"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.095472 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-gml5w" event={"ID":"497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff","Type":"ContainerDied","Data":"3da15c460697077097ce1a124ca5f4c32832a82b8f9eb63d688851cf7ec8c3b9"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.095585 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-gml5w" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.098879 4879 generic.go:334] "Generic (PLEG): container finished" podID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerID="8bab8985c7010c6b6dd0f0feb2d6a0fbbc3be6f8716197ac4c3a366eb018ce99" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.098908 4879 generic.go:334] "Generic (PLEG): container finished" podID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerID="a80540e1f8d0795a7d0f08a859ed3eccf014e5be89d1ab61902666a3115d369e" exitCode=143 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.098966 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerDied","Data":"8bab8985c7010c6b6dd0f0feb2d6a0fbbc3be6f8716197ac4c3a366eb018ce99"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.098994 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerDied","Data":"a80540e1f8d0795a7d0f08a859ed3eccf014e5be89d1ab61902666a3115d369e"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.099007 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6f68cc547f-bvplz" event={"ID":"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b","Type":"ContainerDied","Data":"07f03abad3e8b757760025bf0305e5062df952236661383eba0f291893616873"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.099018 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07f03abad3e8b757760025bf0305e5062df952236661383eba0f291893616873" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.110796 4879 generic.go:334] "Generic (PLEG): container finished" podID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerID="af2731b4880c2c108e4f64495391dcd175a79df8df0d6cf81c3c2064f645d782" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.110935 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a901e0fb-8403-4e8d-a1b1-b3ccae942552","Type":"ContainerDied","Data":"af2731b4880c2c108e4f64495391dcd175a79df8df0d6cf81c3c2064f645d782"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.130107 4879 generic.go:334] "Generic (PLEG): container finished" podID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerID="6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.130315 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerDied","Data":"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.140219 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.150706 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0c4f1-account-delete-cfzvs" event={"ID":"e5944a1b-2616-48bd-9695-32641324e1c2","Type":"ContainerStarted","Data":"ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.156166 4879 reconciler_common.go:293] "Volume detached for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.159870 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="cd5d7ea0e9867e8b0fe2167eed20836d54166e4956798392a0fa624050ba2841" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.159905 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="f2156b4d210c38de5222f44394d8ff73450ee1af32aec0303948ee68935f943c" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.159915 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="01d6163416959123eea9546db5e997dc58a1c8fb48cc8df296356f31a71cb2f2" exitCode=0 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.159981 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"cd5d7ea0e9867e8b0fe2167eed20836d54166e4956798392a0fa624050ba2841"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.160011 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"f2156b4d210c38de5222f44394d8ff73450ee1af32aec0303948ee68935f943c"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.160027 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"01d6163416959123eea9546db5e997dc58a1c8fb48cc8df296356f31a71cb2f2"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.162692 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder6256-account-delete-txbhm" event={"ID":"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8","Type":"ContainerStarted","Data":"90394b819b657c3f3d1e53eefb4c2fe8456a14f9e59b5593dcf7c899cce4aecf"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.169242 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-q22r5" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.169366 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb847-account-delete-qvsn6" event={"ID":"4542a25b-82fa-419d-a6bb-8a2f653d88a1","Type":"ContainerStarted","Data":"60c1bb95fb667e1cfb8ef1c47bd60a33a7957edb94f58f6505db0fb2d7fd42c3"} Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.169526 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.310955 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e86acc2c-393a-40eb-b8bb-1cda1ef3c298" (UID: "e86acc2c-393a-40eb-b8bb-1cda1ef3c298"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.332310 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.360200 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.360500 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.365168 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.394065 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.412832 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e86acc2c-393a-40eb-b8bb-1cda1ef3c298" (UID: "e86acc2c-393a-40eb-b8bb-1cda1ef3c298"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.419620 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.437649 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.462863 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config" (OuterVolumeSpecName: "config") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472290 4879 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472377 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472394 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472407 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472428 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.472441 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.484818 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e86acc2c-393a-40eb-b8bb-1cda1ef3c298" (UID: "e86acc2c-393a-40eb-b8bb-1cda1ef3c298"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.499327 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.518664 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "1d972e68-542e-456d-9b40-5cf7aa4b68c7" (UID: "1d972e68-542e-456d-9b40-5cf7aa4b68c7"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.531786 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.540307 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" (UID: "a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.551517 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "f4987514-2183-451f-98a0-3942895acd0f" (UID: "f4987514-2183-451f-98a0-3942895acd0f"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.551557 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65 is running failed: container process not found" containerID="e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.552167 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65 is running failed: container process not found" containerID="e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.552851 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65 is running failed: container process not found" containerID="e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.552895 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerName="nova-scheduler-scheduler" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.581002 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc 
kubenswrapper[4879]: I1125 14:54:18.581036 4879 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.581048 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4987514-2183-451f-98a0-3942895acd0f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.581061 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.581077 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d972e68-542e-456d-9b40-5cf7aa4b68c7-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.581091 4879 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e86acc2c-393a-40eb-b8bb-1cda1ef3c298-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.590461 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" (UID: "497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.648713 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.687293 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.692423 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8 is running failed: container process not found" containerID="3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.694928 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8 is running failed: container process not found" containerID="3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.699784 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.699874 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8 is running failed: container process not found" containerID="3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.699894 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8 is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="galera" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.701334 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.701667 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if 
PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.701704 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.706642 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.708100 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.721928 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.722082 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.787178 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1 is running failed: container process not found" containerID="4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.790479 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1 is running failed: container process not found" containerID="4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.792164 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1 is running failed: container process not found" 
containerID="4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.792222 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="ovsdbserver-sb" Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.795570 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.809482 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.816324 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 25 14:54:18 crc kubenswrapper[4879]: E1125 14:54:18.816489 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.910903 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.913593 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-central-agent" containerID="cri-o://946e3d89d413078837a3446d11985d445eb85d6f25cc051bdb967850d1056a98" gracePeriod=30 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.913752 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="sg-core" containerID="cri-o://10a617444ae16388f07c8ef9802441bb3504b64919ce0e9b060337efd73ed89a" gracePeriod=30 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.913770 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="proxy-httpd" containerID="cri-o://cbadc53e85256e4bfdbe1b1d3b2ce2bbe19cc469e3ffc3a3cb5d56a035939efe" gracePeriod=30 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.913783 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-notification-agent" 
containerID="cri-o://0b47fab24bee0e38de52810e8aac9e447edcc833a507ca4dc0bc3be5ed22829d" gracePeriod=30 Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.928900 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:54:18 crc kubenswrapper[4879]: I1125 14:54:18.929335 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerName="kube-state-metrics" containerID="cri-o://d1174852e93b7c0b41154253368f50c3ee1e1d604b6b7130605e291d51af060e" gracePeriod=30 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.077448 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.077689 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="57c000a8-9862-4518-87aa-d818a118973c" containerName="memcached" containerID="cri-o://1093ba44c36e637d70c8ff92badde094bc3eaebb6c3b1cca2915b1d61de7025a" gracePeriod=30 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.130036 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-72m5s"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.159311 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vtvqt"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.172380 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-72m5s"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.188464 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vtvqt"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.198569 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystonee239-account-delete-47sbh"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.199299 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.199454 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.199544 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.199679 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.199769 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="init" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.199847 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="init" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.199931 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="dnsmasq-dns" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.200005 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="dnsmasq-dns" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.200097 4879 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="ovsdbserver-nb" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.200185 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="ovsdbserver-nb" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.200288 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.200378 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.200759 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.201312 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" containerName="ovn-controller" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.201404 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="ovsdbserver-nb" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.201473 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4987514-2183-451f-98a0-3942895acd0f" containerName="openstack-network-exporter" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.201558 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" containerName="dnsmasq-dns" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.203216 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.211570 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.211821 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-77b6b86f95-nr5cz" podUID="df7b7503-cc0d-48fe-be8a-75f2362edebf" containerName="keystone-api" containerID="cri-o://c9cf3523b0a1a1aafb8fd8490358b7f25d0f8537ee1e95391e205a2a8c57be8d" gracePeriod=30 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.261352 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystonee239-account-delete-47sbh"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.272077 4879 generic.go:334] "Generic (PLEG): container finished" podID="ca53b222-8018-4445-aa86-5401dbc847b5" containerID="36424a820326e2bcb9b581c795d4ca353c4c72c54677e8a6f4d43be34d1f7512" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.272716 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance4d84-account-delete-2z9mh" event={"ID":"ca53b222-8018-4445-aa86-5401dbc847b5","Type":"ContainerDied","Data":"36424a820326e2bcb9b581c795d4ca353c4c72c54677e8a6f4d43be34d1f7512"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.296946 4879 generic.go:334] "Generic (PLEG): container finished" podID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerID="e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.297048 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3273dc-a6fa-43b7-8225-7a175f55da77","Type":"ContainerDied","Data":"e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.297074 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d3273dc-a6fa-43b7-8225-7a175f55da77","Type":"ContainerDied","Data":"981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.297088 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="981eae220ea3989fe532ae3c951c31fba16f7e4ab11ef8921498cebd25e4d550" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.303401 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.329655 4879 generic.go:334] "Generic (PLEG): container finished" podID="2241b679-a172-4455-8fed-c31014efe301" containerID="10a617444ae16388f07c8ef9802441bb3504b64919ce0e9b060337efd73ed89a" exitCode=2 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.329784 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerDied","Data":"10a617444ae16388f07c8ef9802441bb3504b64919ce0e9b060337efd73ed89a"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.342427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ff8z\" (UniqueName: \"kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 
14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.342529 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.342571 4879 generic.go:334] "Generic (PLEG): container finished" podID="ea2d8cdd-6c47-4cf7-b336-933762d2c445" containerID="672b5efbcdf268d653fbcef5135f5823cee5ae4a86ddaa91ff9d8056bcb00fc4" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.342642 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican97a3-account-delete-gdmqs" event={"ID":"ea2d8cdd-6c47-4cf7-b336-933762d2c445","Type":"ContainerDied","Data":"672b5efbcdf268d653fbcef5135f5823cee5ae4a86ddaa91ff9d8056bcb00fc4"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.360962 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-cdz4d"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.361024 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a901e0fb-8403-4e8d-a1b1-b3ccae942552","Type":"ContainerDied","Data":"6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.361061 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6eceb79f9c3cc43c5da1cd704fe533122923636aab23d279059baefecaeebc48" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.379999 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6c8985d949-st5vd" event={"ID":"552e169f-1bf1-4d0b-802a-da9720c6a35d","Type":"ContainerDied","Data":"4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.380040 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e30297ebf7b7469ed19c893a539c1adb048a13b97bbc6f1af37524091369c1a" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.396993 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-cdz4d"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.399372 4879 generic.go:334] "Generic (PLEG): container finished" podID="1cc2c187-456f-439a-a4b2-33dda7946308" containerID="04433574a192b4e76f4a1047133e9f8463a81e6a8e9269e4a2083629111afa0d" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.399514 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerDied","Data":"04433574a192b4e76f4a1047133e9f8463a81e6a8e9269e4a2083629111afa0d"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.431590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e81331ae-5592-4d18-8116-ef1ef9520145","Type":"ContainerDied","Data":"f249f05397b38fbb3270070c2671f4c759574fa503fc84f3dd1c0488218071dd"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.431637 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f249f05397b38fbb3270070c2671f4c759574fa503fc84f3dd1c0488218071dd" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.441131 4879 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/keystone-e239-account-create-f8pc8"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.445401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.445551 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.445875 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:19.945842906 +0000 UTC m=+1751.549256047 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : configmap "openstack-scripts" not found Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.445902 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ff8z\" (UniqueName: \"kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.451146 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystonee239-account-delete-47sbh"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.463708 4879 projected.go:194] Error preparing data for projected volume kube-api-access-9ff8z for pod openstack/keystonee239-account-delete-47sbh: failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.463924 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-e239-account-create-f8pc8"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.463949 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:19.96389729 +0000 UTC m=+1751.567310361 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-9ff8z" (UniqueName: "kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.465716 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi40fc-account-delete-2nm97" event={"ID":"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7","Type":"ContainerStarted","Data":"4bdd399320c939f3d2bdf59e0ed90fd94f80d881805fd845c0a6be5fefd851be"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.483569 4879 generic.go:334] "Generic (PLEG): container finished" podID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerID="d1174852e93b7c0b41154253368f50c3ee1e1d604b6b7130605e291d51af060e" exitCode=2 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.483827 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0","Type":"ContainerDied","Data":"d1174852e93b7c0b41154253368f50c3ee1e1d604b6b7130605e291d51af060e"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.484839 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-4svkl"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.500964 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-4svkl"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.503409 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" containerID="8a0dd2eaef1407f0c4a4c8222672807747463c45e6a7033fe52d780b921e0356" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.503516 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder6256-account-delete-txbhm" event={"ID":"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8","Type":"ContainerDied","Data":"8a0dd2eaef1407f0c4a4c8222672807747463c45e6a7033fe52d780b921e0356"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.508499 4879 generic.go:334] "Generic (PLEG): container finished" podID="a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" containerID="82900f612c16251c224cea50e5e158d9454b14945eaa777b709af96a81ee1880" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.508582 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement30f0-account-delete-s2wtk" event={"ID":"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc","Type":"ContainerDied","Data":"82900f612c16251c224cea50e5e158d9454b14945eaa777b709af96a81ee1880"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.521998 4879 generic.go:334] "Generic (PLEG): container finished" podID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerID="41b342be3eecc98813aaf9b468892bd643aa0e34ac73a85cd3d396c5e9f92e0a" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.522247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerDied","Data":"41b342be3eecc98813aaf9b468892bd643aa0e34ac73a85cd3d396c5e9f92e0a"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.526582 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.547055 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell0-c4f1-account-create-f8ntq"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.562201 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-c4f1-account-create-f8ntq"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.565493 4879 generic.go:334] "Generic (PLEG): container finished" podID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerID="9c3fd5bfe1c4a686adda7380350f15818f581370513fbd5cb85f18e67e6f730e" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.565599 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerDied","Data":"9c3fd5bfe1c4a686adda7380350f15818f581370513fbd5cb85f18e67e6f730e"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.572966 4879 generic.go:334] "Generic (PLEG): container finished" podID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerID="3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" exitCode=0 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.573016 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerDied","Data":"3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.573219 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a19706bc-9684-4f70-a0e8-9108014cac2f","Type":"ContainerDied","Data":"659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af"} Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.573240 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="659da50acbc281b21285e2d037f6e307bcb0d8341256caa802f52d2d3ee052af" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.607897 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": dial tcp 10.217.0.211:8775: connect: connection refused" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.608276 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": dial tcp 10.217.0.211:8775: connect: connection refused" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.608337 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-lgf9c"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.629606 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-lgf9c"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.666682 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.668145 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.714217 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="galera" containerID="cri-o://9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e" gracePeriod=30 Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.722325 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.184:9311/healthcheck\": read tcp 10.217.0.2:51778->10.217.0.184:9311: read: connection reset by peer" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.722965 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.184:9311/healthcheck\": read tcp 10.217.0.2:51780->10.217.0.184:9311: read: connection reset by peer" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.754644 4879 scope.go:117] "RemoveContainer" containerID="44362b248851344f78dab41e04ffc895817c8ec86f541ed03552d529c19b1fa6" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.757024 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.796840 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="028ab6a4-5cba-48ec-af0f-6cc019d46e60" path="/var/lib/kubelet/pods/028ab6a4-5cba-48ec-af0f-6cc019d46e60/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.798661 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1753af15-90b5-420c-bd77-86c117e8dd51" path="/var/lib/kubelet/pods/1753af15-90b5-420c-bd77-86c117e8dd51/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.799834 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33f1fc15-7c2c-4fa1-94d8-704a644db8bb" path="/var/lib/kubelet/pods/33f1fc15-7c2c-4fa1-94d8-704a644db8bb/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.800492 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d7d4dbb-8a59-4b25-a339-9c581c375b4a" path="/var/lib/kubelet/pods/4d7d4dbb-8a59-4b25-a339-9c581c375b4a/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.802824 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="512a9f5d-6203-4731-b139-e3698f82f8ca" path="/var/lib/kubelet/pods/512a9f5d-6203-4731-b139-e3698f82f8ca/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.803840 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fc80e72-1910-4244-ba55-7046e4a9a5f1" path="/var/lib/kubelet/pods/5fc80e72-1910-4244-ba55-7046e4a9a5f1/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.806063 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0bbad7b-911f-46df-a620-15fd94df5326" path="/var/lib/kubelet/pods/e0bbad7b-911f-46df-a620-15fd94df5326/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.808995 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e86acc2c-393a-40eb-b8bb-1cda1ef3c298" 
path="/var/lib/kubelet/pods/e86acc2c-393a-40eb-b8bb-1cda1ef3c298/volumes" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.810981 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-40fc-account-create-vzsl2"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.811475 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.811591 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-40fc-account-create-vzsl2"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.846240 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.850607 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.852723 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.852858 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerName="nova-cell1-conductor-conductor" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.866829 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle\") pod \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.866923 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5hcr\" (UniqueName: \"kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr\") pod \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.867004 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data\") pod \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.867099 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs\") pod \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") 
" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.867170 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom\") pod \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\" (UID: \"6e4d89e1-a52e-4778-b0fb-d71662d7cc2b\") " Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.868679 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs" (OuterVolumeSpecName: "logs") pod "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" (UID: "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.873948 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr" (OuterVolumeSpecName: "kube-api-access-f5hcr") pod "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" (UID: "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b"). InnerVolumeSpecName "kube-api-access-f5hcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.897647 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" (UID: "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.971051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ff8z\" (UniqueName: \"kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.971114 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.971317 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5hcr\" (UniqueName: \"kubernetes.io/projected/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-kube-api-access-f5hcr\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.972016 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:19 crc kubenswrapper[4879]: I1125 14:54:19.972050 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.972133 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.972187 4879 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:20.972168721 +0000 UTC m=+1752.575581792 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : configmap "openstack-scripts" not found Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.978911 4879 projected.go:194] Error preparing data for projected volume kube-api-access-9ff8z for pod openstack/keystonee239-account-delete-47sbh: failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:19 crc kubenswrapper[4879]: E1125 14:54:19.978994 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:20.978970114 +0000 UTC m=+1752.582383185 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-9ff8z" (UniqueName: "kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.005110 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data" (OuterVolumeSpecName: "config-data") pod "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" (UID: "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.073794 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.123287 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" (UID: "6e4d89e1-a52e-4778-b0fb-d71662d7cc2b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.175453 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.258760 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.266601 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-gml5w"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.279156 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.284339 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-zs5xv"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.289340 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.327254 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.342360 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.342745 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6c8985d949-st5vd" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.357456 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.364816 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-q22r5"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.367396 4879 scope.go:117] "RemoveContainer" containerID="20f8e7a470342784d528dc91bc65940874d5348dbae47cccff1c3f3137264375" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.383051 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd6pt\" (UniqueName: \"kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt\") pod \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.383160 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs\") pod \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.383283 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom\") pod \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.383335 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle\") 
pod \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.383465 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data\") pod \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\" (UID: \"6b33fd48-a5ae-4916-93f5-0675f1cc8bca\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.388316 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.390462 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs" (OuterVolumeSpecName: "logs") pod "6b33fd48-a5ae-4916-93f5-0675f1cc8bca" (UID: "6b33fd48-a5ae-4916-93f5-0675f1cc8bca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.397720 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.401476 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.401502 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6b33fd48-a5ae-4916-93f5-0675f1cc8bca" (UID: "6b33fd48-a5ae-4916-93f5-0675f1cc8bca"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.434858 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.434950 4879 scope.go:117] "RemoveContainer" containerID="f091e7cd3e3f4ba1a8760e4a6cd4c1fa2c752f285fc9bff0e20deff09ae868d2" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.436424 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt" (OuterVolumeSpecName: "kube-api-access-nd6pt") pod "6b33fd48-a5ae-4916-93f5-0675f1cc8bca" (UID: "6b33fd48-a5ae-4916-93f5-0675f1cc8bca"). InnerVolumeSpecName "kube-api-access-nd6pt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: E1125 14:54:20.453863 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-9ff8z operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystonee239-account-delete-47sbh" podUID="d1296d0e-7f82-4550-ad62-9f3411aea36c" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.470623 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94fd8ccd4-vp796" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.472020 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.485327 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489692 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489755 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489774 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489794 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489824 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489854 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489884 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55hnv\" (UniqueName: \"kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv\") pod \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489927 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489948 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data\") pod \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\" 
(UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489969 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.489998 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle\") pod \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490055 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490086 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djccq\" (UniqueName: \"kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490705 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs\") pod \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490741 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490769 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490800 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490846 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490875 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs\") pod 
\"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490897 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490940 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490959 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.490987 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491012 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t85hb\" (UniqueName: \"kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb\") pod \"e81331ae-5592-4d18-8116-ef1ef9520145\" (UID: \"e81331ae-5592-4d18-8116-ef1ef9520145\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491033 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs\") pod \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\" (UID: \"a901e0fb-8403-4e8d-a1b1-b3ccae942552\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491054 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5rfv\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491071 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle\") pod \"a19706bc-9684-4f70-a0e8-9108014cac2f\" (UID: \"a19706bc-9684-4f70-a0e8-9108014cac2f\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491088 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs\") pod \"552e169f-1bf1-4d0b-802a-da9720c6a35d\" (UID: \"552e169f-1bf1-4d0b-802a-da9720c6a35d\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491744 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd6pt\" (UniqueName: \"kubernetes.io/projected/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-kube-api-access-nd6pt\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc 
kubenswrapper[4879]: I1125 14:54:20.491758 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.491768 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.498573 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts" (OuterVolumeSpecName: "scripts") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.503582 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.487032 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.503895 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb" (OuterVolumeSpecName: "kube-api-access-t85hb") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "kube-api-access-t85hb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.505660 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.505683 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.505836 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config" (OuterVolumeSpecName: "config") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.506345 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.506385 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.507637 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.508366 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.509398 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.517456 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.517928 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq" (OuterVolumeSpecName: "kube-api-access-djccq") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "kube-api-access-djccq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.519417 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv" (OuterVolumeSpecName: "kube-api-access-55hnv") pod "a901e0fb-8403-4e8d-a1b1-b3ccae942552" (UID: "a901e0fb-8403-4e8d-a1b1-b3ccae942552"). InnerVolumeSpecName "kube-api-access-55hnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.519488 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.525961 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.527768 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv" (OuterVolumeSpecName: "kube-api-access-r5rfv") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "kube-api-access-r5rfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.553235 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.576570 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592686 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592717 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592765 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592795 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config\") pod \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592839 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592924 4879 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-cz2dk\" (UniqueName: \"kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592948 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.592970 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593000 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593017 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvfkw\" (UniqueName: \"kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593038 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593092 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle\") pod \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593116 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs\") pod \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593156 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593182 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle\") pod \"0d3273dc-a6fa-43b7-8225-7a175f55da77\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593515 4879 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593541 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593567 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpxnf\" (UniqueName: \"kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf\") pod \"0d3273dc-a6fa-43b7-8225-7a175f55da77\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593603 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593622 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593667 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593685 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593709 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh6mv\" (UniqueName: \"kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593747 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593831 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593872 4879 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.593888 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtd9t\" (UniqueName: \"kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594237 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594255 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594303 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57shw\" (UniqueName: \"kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw\") pod \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\" (UID: \"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594337 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594371 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.594387 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.596231 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs" (OuterVolumeSpecName: "logs") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.596278 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602644 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602736 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602798 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run\") pod \"1cc2c187-456f-439a-a4b2-33dda7946308\" (UID: \"1cc2c187-456f-439a-a4b2-33dda7946308\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602834 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs\") pod \"73963218-ce5b-4813-8224-27ad7b69d0b3\" (UID: \"73963218-ce5b-4813-8224-27ad7b69d0b3\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602875 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602899 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs\") pod \"4835e527-a539-4cc7-8730-d75f0c5af849\" (UID: \"4835e527-a539-4cc7-8730-d75f0c5af849\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602924 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data\") pod \"0d3273dc-a6fa-43b7-8225-7a175f55da77\" (UID: \"0d3273dc-a6fa-43b7-8225-7a175f55da77\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.602977 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tt6c\" (UniqueName: \"kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603024 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603052 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\" (UID: \"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 
14:54:20.603079 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603834 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603865 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603880 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55hnv\" (UniqueName: \"kubernetes.io/projected/a901e0fb-8403-4e8d-a1b1-b3ccae942552-kube-api-access-55hnv\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603896 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603907 4879 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603919 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603932 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djccq\" (UniqueName: \"kubernetes.io/projected/a19706bc-9684-4f70-a0e8-9108014cac2f-kube-api-access-djccq\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603947 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a19706bc-9684-4f70-a0e8-9108014cac2f-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603959 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/552e169f-1bf1-4d0b-802a-da9720c6a35d-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603971 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603983 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.603998 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56f930a5-3344-4b7f-90d4-10a4b758e740-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc 
kubenswrapper[4879]: I1125 14:54:20.604011 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.604022 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e81331ae-5592-4d18-8116-ef1ef9520145-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.604032 4879 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a19706bc-9684-4f70-a0e8-9108014cac2f-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.604046 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t85hb\" (UniqueName: \"kubernetes.io/projected/e81331ae-5592-4d18-8116-ef1ef9520145-kube-api-access-t85hb\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.604060 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5rfv\" (UniqueName: \"kubernetes.io/projected/552e169f-1bf1-4d0b-802a-da9720c6a35d-kube-api-access-r5rfv\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.717899 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts" (OuterVolumeSpecName: "scripts") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.721535 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.721700 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.722692 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk" (OuterVolumeSpecName: "kube-api-access-cz2dk") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "kube-api-access-cz2dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.722790 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts" (OuterVolumeSpecName: "scripts") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.728801 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.747964 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv" (OuterVolumeSpecName: "kube-api-access-sh6mv") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "kube-api-access-sh6mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.763632 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.763942 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs" (OuterVolumeSpecName: "logs") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.768211 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t" (OuterVolumeSpecName: "kube-api-access-gtd9t") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "kube-api-access-gtd9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.774854 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs" (OuterVolumeSpecName: "logs") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.791575 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs" (OuterVolumeSpecName: "logs") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.798908 4879 generic.go:334] "Generic (PLEG): container finished" podID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerID="ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.798982 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerDied","Data":"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.799022 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"56f930a5-3344-4b7f-90d4-10a4b758e740","Type":"ContainerDied","Data":"c4ef735fb769d92e45c5f3a173a119bb6a11ed2a4e29298b0700403da0985e73"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.799045 4879 scope.go:117] "RemoveContainer" containerID="ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.808852 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.829891 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx7s4\" (UniqueName: \"kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4\") pod \"6391ba8e-71b4-44d3-8a99-14ff66c61604\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.829980 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs\") pod \"6391ba8e-71b4-44d3-8a99-14ff66c61604\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.832356 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs" (OuterVolumeSpecName: "logs") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.849369 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data\") pod \"6391ba8e-71b4-44d3-8a99-14ff66c61604\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.849711 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") pod \"56f930a5-3344-4b7f-90d4-10a4b758e740\" (UID: \"56f930a5-3344-4b7f-90d4-10a4b758e740\") " Nov 25 14:54:20 crc kubenswrapper[4879]: W1125 14:54:20.850306 4879 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/56f930a5-3344-4b7f-90d4-10a4b758e740/volumes/kubernetes.io~empty-dir/logs Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.850325 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs" (OuterVolumeSpecName: "logs") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.850444 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs\") pod \"6391ba8e-71b4-44d3-8a99-14ff66c61604\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.850858 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle\") pod \"6391ba8e-71b4-44d3-8a99-14ff66c61604\" (UID: \"6391ba8e-71b4-44d3-8a99-14ff66c61604\") " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.850909 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs" (OuterVolumeSpecName: "logs") pod "6391ba8e-71b4-44d3-8a99-14ff66c61604" (UID: "6391ba8e-71b4-44d3-8a99-14ff66c61604"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852380 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852422 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852442 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4835e527-a539-4cc7-8730-d75f0c5af849-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852457 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1cc2c187-456f-439a-a4b2-33dda7946308-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852469 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73963218-ce5b-4813-8224-27ad7b69d0b3-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852480 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56f930a5-3344-4b7f-90d4-10a4b758e740-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852500 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6391ba8e-71b4-44d3-8a99-14ff66c61604-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852512 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz2dk\" (UniqueName: \"kubernetes.io/projected/4835e527-a539-4cc7-8730-d75f0c5af849-kube-api-access-cz2dk\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852525 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852536 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852554 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh6mv\" (UniqueName: \"kubernetes.io/projected/56f930a5-3344-4b7f-90d4-10a4b758e740-kube-api-access-sh6mv\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852587 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852604 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.852616 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtd9t\" (UniqueName: 
\"kubernetes.io/projected/1cc2c187-456f-439a-a4b2-33dda7946308-kube-api-access-gtd9t\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.861498 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts" (OuterVolumeSpecName: "scripts") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.875061 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1cc2c187-456f-439a-a4b2-33dda7946308","Type":"ContainerDied","Data":"98da8eaa2c4d2aed77c8796bba62b7f609cb8cca79843f564dc5096878ee2dda"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.875340 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.898671 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.898704 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw" (OuterVolumeSpecName: "kube-api-access-mvfkw") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "kube-api-access-mvfkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.898779 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4" (OuterVolumeSpecName: "kube-api-access-cx7s4") pod "6391ba8e-71b4-44d3-8a99-14ff66c61604" (UID: "6391ba8e-71b4-44d3-8a99-14ff66c61604"). InnerVolumeSpecName "kube-api-access-cx7s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.898777 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.898816 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c" (OuterVolumeSpecName: "kube-api-access-8tt6c") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "kube-api-access-8tt6c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.900397 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf" (OuterVolumeSpecName: "kube-api-access-zpxnf") pod "0d3273dc-a6fa-43b7-8225-7a175f55da77" (UID: "0d3273dc-a6fa-43b7-8225-7a175f55da77"). InnerVolumeSpecName "kube-api-access-zpxnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.903933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d41b8b48-a0d1-4f8c-b8e8-96eee57a9743","Type":"ContainerDied","Data":"a615bf4f3b7912a99a0cdec193303baa0cc525a4b6dd7eb9274677bbdf92573b"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.904036 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts" (OuterVolumeSpecName: "scripts") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.904087 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.922235 4879 scope.go:117] "RemoveContainer" containerID="bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.928178 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw" (OuterVolumeSpecName: "kube-api-access-57shw") pod "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" (UID: "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0"). InnerVolumeSpecName "kube-api-access-57shw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.932320 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b33fd48-a5ae-4916-93f5-0675f1cc8bca" (UID: "6b33fd48-a5ae-4916-93f5-0675f1cc8bca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.938337 4879 generic.go:334] "Generic (PLEG): container finished" podID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerID="bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.938401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerDied","Data":"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.938446 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6391ba8e-71b4-44d3-8a99-14ff66c61604","Type":"ContainerDied","Data":"184037fbb26a2a47cbb3bbcd5e3311b5b03d0357c49f299d677581a7515577c0"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.938522 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.956645 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94fd8ccd4-vp796" event={"ID":"73963218-ce5b-4813-8224-27ad7b69d0b3","Type":"ContainerDied","Data":"1fef617d7cf95a614317c9186fe0add4c2f7399383016661beea0542d9d7dd2d"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.956770 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94fd8ccd4-vp796" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957633 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tt6c\" (UniqueName: \"kubernetes.io/projected/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-kube-api-access-8tt6c\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957691 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957705 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957720 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvfkw\" (UniqueName: \"kubernetes.io/projected/73963218-ce5b-4813-8224-27ad7b69d0b3-kube-api-access-mvfkw\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957731 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957742 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpxnf\" (UniqueName: \"kubernetes.io/projected/0d3273dc-a6fa-43b7-8225-7a175f55da77-kube-api-access-zpxnf\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957754 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957766 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx7s4\" (UniqueName: \"kubernetes.io/projected/6391ba8e-71b4-44d3-8a99-14ff66c61604-kube-api-access-cx7s4\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957777 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.957788 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57shw\" (UniqueName: \"kubernetes.io/projected/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-api-access-57shw\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.971239 4879 generic.go:334] "Generic (PLEG): container finished" podID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerID="a140b33b646cb8bebf32e5c01b72ae0d72d960de2557e0cf5cf92d5509342139" exitCode=0 Nov 25 14:54:20 crc 
kubenswrapper[4879]: I1125 14:54:20.971321 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerDied","Data":"a140b33b646cb8bebf32e5c01b72ae0d72d960de2557e0cf5cf92d5509342139"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982544 4879 generic.go:334] "Generic (PLEG): container finished" podID="2241b679-a172-4455-8fed-c31014efe301" containerID="cbadc53e85256e4bfdbe1b1d3b2ce2bbe19cc469e3ffc3a3cb5d56a035939efe" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982590 4879 generic.go:334] "Generic (PLEG): container finished" podID="2241b679-a172-4455-8fed-c31014efe301" containerID="0b47fab24bee0e38de52810e8aac9e447edcc833a507ca4dc0bc3be5ed22829d" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982600 4879 generic.go:334] "Generic (PLEG): container finished" podID="2241b679-a172-4455-8fed-c31014efe301" containerID="946e3d89d413078837a3446d11985d445eb85d6f25cc051bdb967850d1056a98" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerDied","Data":"cbadc53e85256e4bfdbe1b1d3b2ce2bbe19cc469e3ffc3a3cb5d56a035939efe"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982734 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerDied","Data":"0b47fab24bee0e38de52810e8aac9e447edcc833a507ca4dc0bc3be5ed22829d"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.982748 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerDied","Data":"946e3d89d413078837a3446d11985d445eb85d6f25cc051bdb967850d1056a98"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.989342 4879 generic.go:334] "Generic (PLEG): container finished" podID="4835e527-a539-4cc7-8730-d75f0c5af849" containerID="af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed" exitCode=0 Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.989570 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerDied","Data":"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.989646 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4835e527-a539-4cc7-8730-d75f0c5af849","Type":"ContainerDied","Data":"6df21eafbbbf98b51fdbe3a51dbb3b42d81399bab225a48761b673a600b26cd3"} Nov 25 14:54:20 crc kubenswrapper[4879]: I1125 14:54:20.989817 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.020463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0","Type":"ContainerDied","Data":"2820b638963ed956cd317f870e010584e45dd246c1c30739a5626f3464ba4e1e"} Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.020634 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.046060 4879 generic.go:334] "Generic (PLEG): container finished" podID="57c000a8-9862-4518-87aa-d818a118973c" containerID="1093ba44c36e637d70c8ff92badde094bc3eaebb6c3b1cca2915b1d61de7025a" exitCode=0 Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.046219 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.046469 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.046574 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"57c000a8-9862-4518-87aa-d818a118973c","Type":"ContainerDied","Data":"1093ba44c36e637d70c8ff92badde094bc3eaebb6c3b1cca2915b1d61de7025a"} Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.046640 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-76bcc454bb-qmx8f" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.047149 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.047668 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6f68cc547f-bvplz" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.048389 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6c8985d949-st5vd" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.048711 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.048970 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.064517 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ff8z\" (UniqueName: \"kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.064598 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts\") pod \"keystonee239-account-delete-47sbh\" (UID: \"d1296d0e-7f82-4550-ad62-9f3411aea36c\") " pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.064908 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.064979 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:23.064959567 +0000 UTC m=+1754.668372638 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : configmap "openstack-scripts" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.068546 4879 projected.go:194] Error preparing data for projected volume kube-api-access-9ff8z for pod openstack/keystonee239-account-delete-47sbh: failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.068615 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z podName:d1296d0e-7f82-4550-ad62-9f3411aea36c nodeName:}" failed. No retries permitted until 2025-11-25 14:54:23.068597685 +0000 UTC m=+1754.672010746 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-9ff8z" (UniqueName: "kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z") pod "keystonee239-account-delete-47sbh" (UID: "d1296d0e-7f82-4550-ad62-9f3411aea36c") : failed to fetch token: serviceaccounts "galera-openstack" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.174173 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.174250 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data podName:c1814b22-d1b3-4426-9fa2-f613640f63e8 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:29.174229806 +0000 UTC m=+1760.777642877 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data") pod "rabbitmq-cell1-server-0" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8") : configmap "rabbitmq-cell1-config-data" not found Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.231007 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.276449 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.282582 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data" (OuterVolumeSpecName: "config-data") pod "a901e0fb-8403-4e8d-a1b1-b3ccae942552" (UID: "a901e0fb-8403-4e8d-a1b1-b3ccae942552"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.330562 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.333426 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.337680 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.338682 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.349502 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data" (OuterVolumeSpecName: "config-data") pod "6b33fd48-a5ae-4916-93f5-0675f1cc8bca" (UID: "6b33fd48-a5ae-4916-93f5-0675f1cc8bca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.378782 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.378837 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33fd48-a5ae-4916-93f5-0675f1cc8bca-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.427586 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data" (OuterVolumeSpecName: "config-data") pod "6391ba8e-71b4-44d3-8a99-14ff66c61604" (UID: "6391ba8e-71b4-44d3-8a99-14ff66c61604"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.439450 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.441396 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d3273dc-a6fa-43b7-8225-7a175f55da77" (UID: "0d3273dc-a6fa-43b7-8225-7a175f55da77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.453207 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.458196 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" (UID: "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.471425 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "a901e0fb-8403-4e8d-a1b1-b3ccae942552" (UID: "a901e0fb-8403-4e8d-a1b1-b3ccae942552"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485761 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485827 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485845 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485858 4879 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485873 4879 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.485917 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.487779 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.492913 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.561374 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.566323 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.578134 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.581784 4879 scope.go:117] "RemoveContainer" containerID="ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745" Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.582522 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745\": container with ID starting with ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745 not found: ID does not exist" containerID="ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.582588 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745"} err="failed to get container status \"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745\": rpc error: code = NotFound desc = could not find container \"ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745\": container with ID starting with ae51c17ae6f7a0abd202d48f9f5bafbd8355afd8f86fd83a76ac04e364243745 not found: ID does not exist" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.582616 4879 scope.go:117] "RemoveContainer" containerID="bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086" Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.583112 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086\": container with ID starting with bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086 not found: ID does not exist" containerID="bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.583169 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086"} err="failed to get container status \"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086\": rpc error: code = NotFound 
desc = could not find container \"bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086\": container with ID starting with bf88d56b62f26c411b17d0231de51565e30288500dc1f028ee74eeeaece7b086 not found: ID does not exist" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.583183 4879 scope.go:117] "RemoveContainer" containerID="04433574a192b4e76f4a1047133e9f8463a81e6a8e9269e4a2083629111afa0d" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.583919 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.584726 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.587768 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.587788 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.587800 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.587810 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.587863 4879 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 25 14:54:21 crc kubenswrapper[4879]: E1125 14:54:21.587928 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data podName:ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd nodeName:}" failed. No retries permitted until 2025-11-25 14:54:29.587907852 +0000 UTC m=+1761.191320973 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data") pod "rabbitmq-server-0" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd") : configmap "rabbitmq-config-data" not found Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.590244 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" (UID: "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.593562 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.595477 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-6f68cc547f-bvplz"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.603970 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a901e0fb-8403-4e8d-a1b1-b3ccae942552" (UID: "a901e0fb-8403-4e8d-a1b1-b3ccae942552"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.606210 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data" (OuterVolumeSpecName: "config-data") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.610313 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.619543 4879 scope.go:117] "RemoveContainer" containerID="af243b8f2bb1e9dcb67147292116b8c6f0d542da633f05a48c0fdb0824119183" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.620313 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.622486 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.657096 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.658340 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d972e68-542e-456d-9b40-5cf7aa4b68c7" path="/var/lib/kubelet/pods/1d972e68-542e-456d-9b40-5cf7aa4b68c7/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.659203 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43bad898-a521-4f6d-b02f-f6ced31ae960" path="/var/lib/kubelet/pods/43bad898-a521-4f6d-b02f-f6ced31ae960/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.659825 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff" path="/var/lib/kubelet/pods/497f7b87-1bc7-4f4c-acca-97ae3a0ad5ff/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.661001 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" path="/var/lib/kubelet/pods/6e4d89e1-a52e-4778-b0fb-d71662d7cc2b/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.661926 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc" path="/var/lib/kubelet/pods/a81f4566-1afc-4f61-b2c6-e0aa9e0bd8bc/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.662747 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4987514-2183-451f-98a0-3942895acd0f" path="/var/lib/kubelet/pods/f4987514-2183-451f-98a0-3942895acd0f/volumes" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.676228 4879 scope.go:117] "RemoveContainer" containerID="41b342be3eecc98813aaf9b468892bd643aa0e34ac73a85cd3d396c5e9f92e0a" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688604 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688663 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688742 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data\") pod \"57c000a8-9862-4518-87aa-d818a118973c\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688764 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688780 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688802 
4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config\") pod \"57c000a8-9862-4518-87aa-d818a118973c\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688858 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs\") pod \"57c000a8-9862-4518-87aa-d818a118973c\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688894 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm42q\" (UniqueName: \"kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q\") pod \"57c000a8-9862-4518-87aa-d818a118973c\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688937 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688953 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.688982 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-746dz\" (UniqueName: \"kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689002 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689031 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689052 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689105 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc 
kubenswrapper[4879]: I1125 14:54:21.689155 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689187 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689240 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689266 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689287 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689320 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h27x\" (UniqueName: \"kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689338 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle\") pod \"57c000a8-9862-4518-87aa-d818a118973c\" (UID: \"57c000a8-9862-4518-87aa-d818a118973c\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689363 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689392 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs\") pod \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\" (UID: \"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689458 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689480 
4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id\") pod \"dd903399-aa23-4f0d-93fc-4c7a5f454750\" (UID: \"dd903399-aa23-4f0d-93fc-4c7a5f454750\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689876 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689981 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.689993 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.690003 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.690012 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.690042 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.698684 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.699592 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.709435 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "57c000a8-9862-4518-87aa-d818a118973c" (UID: "57c000a8-9862-4518-87aa-d818a118973c"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.710704 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.721423 4879 scope.go:117] "RemoveContainer" containerID="05364380d2e63d55c39d597c799b96575c6c47041dc20bb528f86650c9382e54" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.723956 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs" (OuterVolumeSpecName: "logs") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.724208 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data" (OuterVolumeSpecName: "config-data") pod "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" (UID: "d41b8b48-a0d1-4f8c-b8e8-96eee57a9743"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.739858 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.749159 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data" (OuterVolumeSpecName: "config-data") pod "57c000a8-9862-4518-87aa-d818a118973c" (UID: "57c000a8-9862-4518-87aa-d818a118973c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.750817 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q" (OuterVolumeSpecName: "kube-api-access-bm42q") pod "57c000a8-9862-4518-87aa-d818a118973c" (UID: "57c000a8-9862-4518-87aa-d818a118973c"). InnerVolumeSpecName "kube-api-access-bm42q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.790993 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts" (OuterVolumeSpecName: "scripts") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.791188 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.792381 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8" (OuterVolumeSpecName: "kube-api-access-lzhk8") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "kube-api-access-lzhk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.796778 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ca53b222-8018-4445-aa86-5401dbc847b5" (UID: "ca53b222-8018-4445-aa86-5401dbc847b5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.782864 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.802516 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-76bcc454bb-qmx8f"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.796413 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts\") pod \"ca53b222-8018-4445-aa86-5401dbc847b5\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.802652 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64w7s\" (UniqueName: \"kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s\") pod \"ca53b222-8018-4445-aa86-5401dbc847b5\" (UID: \"ca53b222-8018-4445-aa86-5401dbc847b5\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.802922 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.803058 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") pod \"2241b679-a172-4455-8fed-c31014efe301\" (UID: \"2241b679-a172-4455-8fed-c31014efe301\") " Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.803465 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts" (OuterVolumeSpecName: "scripts") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: W1125 14:54:21.804061 4879 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/2241b679-a172-4455-8fed-c31014efe301/volumes/kubernetes.io~projected/kube-api-access-lzhk8 Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.804178 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8" (OuterVolumeSpecName: "kube-api-access-lzhk8") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "kube-api-access-lzhk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: W1125 14:54:21.804341 4879 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/2241b679-a172-4455-8fed-c31014efe301/volumes/kubernetes.io~secret/scripts Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.804352 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts" (OuterVolumeSpecName: "scripts") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.804613 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.804932 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz" (OuterVolumeSpecName: "kube-api-access-746dz") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "kube-api-access-746dz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807068 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807097 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd903399-aa23-4f0d-93fc-4c7a5f454750-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807110 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzhk8\" (UniqueName: \"kubernetes.io/projected/2241b679-a172-4455-8fed-c31014efe301-kube-api-access-lzhk8\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807149 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807162 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807175 4879 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57c000a8-9862-4518-87aa-d818a118973c-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807190 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm42q\" (UniqueName: \"kubernetes.io/projected/57c000a8-9862-4518-87aa-d818a118973c-kube-api-access-bm42q\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807203 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-746dz\" (UniqueName: \"kubernetes.io/projected/dd903399-aa23-4f0d-93fc-4c7a5f454750-kube-api-access-746dz\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807236 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807247 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-logs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807259 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807271 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807285 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc 
kubenswrapper[4879]: I1125 14:54:21.807322 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2241b679-a172-4455-8fed-c31014efe301-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807335 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.807349 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca53b222-8018-4445-aa86-5401dbc847b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.809377 4879 scope.go:117] "RemoveContainer" containerID="bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.822518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x" (OuterVolumeSpecName: "kube-api-access-4h27x") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "kube-api-access-4h27x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.827283 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s" (OuterVolumeSpecName: "kube-api-access-64w7s") pod "ca53b222-8018-4445-aa86-5401dbc847b5" (UID: "ca53b222-8018-4445-aa86-5401dbc847b5"). InnerVolumeSpecName "kube-api-access-64w7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.830824 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.855023 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.913480 4879 scope.go:117] "RemoveContainer" containerID="3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.916919 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.926348 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h27x\" (UniqueName: \"kubernetes.io/projected/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-kube-api-access-4h27x\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.926388 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64w7s\" (UniqueName: \"kubernetes.io/projected/ca53b222-8018-4445-aa86-5401dbc847b5-kube-api-access-64w7s\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.926403 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.963467 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.969378 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.973476 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.974178 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data" (OuterVolumeSpecName: "config-data") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.982405 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). 
InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:21 crc kubenswrapper[4879]: I1125 14:54:21.994285 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data" (OuterVolumeSpecName: "config-data") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.007695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data" (OuterVolumeSpecName: "config-data") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033211 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033243 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033254 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033262 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033271 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033281 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.033290 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.051901 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.057360 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.061762 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6391ba8e-71b4-44d3-8a99-14ff66c61604" (UID: "6391ba8e-71b4-44d3-8a99-14ff66c61604"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.069616 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "552e169f-1bf1-4d0b-802a-da9720c6a35d" (UID: "552e169f-1bf1-4d0b-802a-da9720c6a35d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.076975 4879 generic.go:334] "Generic (PLEG): container finished" podID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerID="99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750" exitCode=0 Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.077085 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerDied","Data":"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.077117 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dd903399-aa23-4f0d-93fc-4c7a5f454750","Type":"ContainerDied","Data":"cbddf5204eb85902d3478d0bbd23c509d624d02bd2e4159d19a7d70a3dafa120"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.077206 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.080026 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0c4f1-account-delete-cfzvs" event={"ID":"e5944a1b-2616-48bd-9695-32641324e1c2","Type":"ContainerStarted","Data":"d0302612476ccf74c6889b66b2329d309c9a5c72dd0400e67fa30365f9b4ac12"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.080193 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell0c4f1-account-delete-cfzvs" podUID="e5944a1b-2616-48bd-9695-32641324e1c2" containerName="mariadb-account-delete" containerID="cri-o://d0302612476ccf74c6889b66b2329d309c9a5c72dd0400e67fa30365f9b4ac12" gracePeriod=30 Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.091273 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "a901e0fb-8403-4e8d-a1b1-b3ccae942552" (UID: "a901e0fb-8403-4e8d-a1b1-b3ccae942552"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.092280 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi40fc-account-delete-2nm97" event={"ID":"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7","Type":"ContainerStarted","Data":"55880395db69d8e89d332ff4acd427f5ad61a8c22f2c6fc5803cf83bd5ee049c"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.092374 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapi40fc-account-delete-2nm97" podUID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" containerName="mariadb-account-delete" containerID="cri-o://55880395db69d8e89d332ff4acd427f5ad61a8c22f2c6fc5803cf83bd5ee049c" gracePeriod=30 Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.106674 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement30f0-account-delete-s2wtk" event={"ID":"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc","Type":"ContainerDied","Data":"bd0fcabe281def1397d420f5b8f5e69ab61657dc43fdc6e4701f238ec1d81dcc"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.106715 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd0fcabe281def1397d420f5b8f5e69ab61657dc43fdc6e4701f238ec1d81dcc" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.108600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican97a3-account-delete-gdmqs" event={"ID":"ea2d8cdd-6c47-4cf7-b336-933762d2c445","Type":"ContainerDied","Data":"97a711b8e400e1ab873d99420bd77255f267f1d2a213a272fd6a0c7f4dad5819"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.108637 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97a711b8e400e1ab873d99420bd77255f267f1d2a213a272fd6a0c7f4dad5819" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.129577 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb847-account-delete-qvsn6" event={"ID":"4542a25b-82fa-419d-a6bb-8a2f653d88a1","Type":"ContainerStarted","Data":"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.130211 4879 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/neutronb847-account-delete-qvsn6" secret="" err="secret \"galera-openstack-dockercfg-fks6b\" not found" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.134179 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapi40fc-account-delete-2nm97" podStartSLOduration=8.134160738 podStartE2EDuration="8.134160738s" podCreationTimestamp="2025-11-25 14:54:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:54:22.117854602 +0000 UTC m=+1753.721267673" watchObservedRunningTime="2025-11-25 14:54:22.134160738 +0000 UTC m=+1753.737573809" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.134570 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell0c4f1-account-delete-cfzvs" podStartSLOduration=8.134564869 podStartE2EDuration="8.134564869s" podCreationTimestamp="2025-11-25 14:54:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:54:22.099585892 +0000 UTC m=+1753.702998973" watchObservedRunningTime="2025-11-25 14:54:22.134564869 +0000 UTC m=+1753.737977940" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.135141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" event={"ID":"cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb","Type":"ContainerDied","Data":"860ae33e8db0f2927b43ebc352e3a7345732946f001b596eec9ff56b2f703de7"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.135198 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b6ffb6c8b-jdb4h" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.136518 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.136547 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.136572 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.136585 4879 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a901e0fb-8403-4e8d-a1b1-b3ccae942552-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.136598 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/552e169f-1bf1-4d0b-802a-da9720c6a35d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.149031 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder6256-account-delete-txbhm" event={"ID":"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8","Type":"ContainerDied","Data":"90394b819b657c3f3d1e53eefb4c2fe8456a14f9e59b5593dcf7c899cce4aecf"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.149079 4879 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="90394b819b657c3f3d1e53eefb4c2fe8456a14f9e59b5593dcf7c899cce4aecf" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.149209 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1cc2c187-456f-439a-a4b2-33dda7946308" (UID: "1cc2c187-456f-439a-a4b2-33dda7946308"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.151795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"57c000a8-9862-4518-87aa-d818a118973c","Type":"ContainerDied","Data":"c7fb3a736b89e1c517e2f412d6f2d74e950e7bf107319bbacf17832e99c8e1f0"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.152049 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.157354 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2241b679-a172-4455-8fed-c31014efe301","Type":"ContainerDied","Data":"3c41c6b7425791d59c6df31c8fe655c7055a212627fce7c34e1c381dd3fe0c78"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.159216 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.163406 4879 generic.go:334] "Generic (PLEG): container finished" podID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerID="de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" exitCode=0 Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.163488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ee22f7f3-e4e8-4166-87bd-ca7833654410","Type":"ContainerDied","Data":"de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.163520 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ee22f7f3-e4e8-4166-87bd-ca7833654410","Type":"ContainerDied","Data":"9ea62df33810e22ab4c765b9f094855bb5651dd4acd4a54732575dd34089cdd9"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.163535 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ea62df33810e22ab4c765b9f094855bb5651dd4acd4a54732575dd34089cdd9" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.165838 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance4d84-account-delete-2z9mh" event={"ID":"ca53b222-8018-4445-aa86-5401dbc847b5","Type":"ContainerDied","Data":"8a6d9543a82b0824291a50ceb6499a7f01849775bd41debd2a6f8ce5672d7c37"} Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.165870 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance4d84-account-delete-2z9mh" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.165881 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a6d9543a82b0824291a50ceb6499a7f01849775bd41debd2a6f8ce5672d7c37" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.165871 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystonee239-account-delete-47sbh" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.171813 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutronb847-account-delete-qvsn6" podStartSLOduration=9.171794598 podStartE2EDuration="9.171794598s" podCreationTimestamp="2025-11-25 14:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 14:54:22.148008167 +0000 UTC m=+1753.751421248" watchObservedRunningTime="2025-11-25 14:54:22.171794598 +0000 UTC m=+1753.775207669" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.207377 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "a19706bc-9684-4f70-a0e8-9108014cac2f" (UID: "a19706bc-9684-4f70-a0e8-9108014cac2f"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.219190 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.226107 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57c000a8-9862-4518-87aa-d818a118973c" (UID: "57c000a8-9862-4518-87aa-d818a118973c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.230259 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.233893 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4835e527-a539-4cc7-8730-d75f0c5af849" (UID: "4835e527-a539-4cc7-8730-d75f0c5af849"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238552 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238577 4879 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a19706bc-9684-4f70-a0e8-9108014cac2f-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238586 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4835e527-a539-4cc7-8730-d75f0c5af849-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238597 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc2c187-456f-439a-a4b2-33dda7946308-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238606 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.238615 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.238809 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.238861 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:22.738846713 +0000 UTC m=+1754.342259784 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.247172 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" (UID: "dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.273263 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data" (OuterVolumeSpecName: "config-data") pod "0d3273dc-a6fa-43b7-8225-7a175f55da77" (UID: "0d3273dc-a6fa-43b7-8225-7a175f55da77"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.285059 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data" (OuterVolumeSpecName: "config-data") pod "56f930a5-3344-4b7f-90d4-10a4b758e740" (UID: "56f930a5-3344-4b7f-90d4-10a4b758e740"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.297994 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "57c000a8-9862-4518-87aa-d818a118973c" (UID: "57c000a8-9862-4518-87aa-d818a118973c"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.301354 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data" (OuterVolumeSpecName: "config-data") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.324307 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.333261 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.341611 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.341802 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56f930a5-3344-4b7f-90d4-10a4b758e740-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.341862 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3273dc-a6fa-43b7-8225-7a175f55da77-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.341918 4879 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/57c000a8-9862-4518-87aa-d818a118973c-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.341976 4879 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.342037 4879 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.342097 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.351808 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.352194 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.353647 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" (UID: "cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.355207 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6391ba8e-71b4-44d3-8a99-14ff66c61604" (UID: "6391ba8e-71b4-44d3-8a99-14ff66c61604"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.395108 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "73963218-ce5b-4813-8224-27ad7b69d0b3" (UID: "73963218-ce5b-4813-8224-27ad7b69d0b3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.397275 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.412583 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e81331ae-5592-4d18-8116-ef1ef9520145" (UID: "e81331ae-5592-4d18-8116-ef1ef9520145"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.431204 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data" (OuterVolumeSpecName: "config-data") pod "dd903399-aa23-4f0d-93fc-4c7a5f454750" (UID: "dd903399-aa23-4f0d-93fc-4c7a5f454750"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.438056 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data" (OuterVolumeSpecName: "config-data") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446461 4879 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6391ba8e-71b4-44d3-8a99-14ff66c61604-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446525 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446542 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81331ae-5592-4d18-8116-ef1ef9520145-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446553 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446565 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd903399-aa23-4f0d-93fc-4c7a5f454750-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446575 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446588 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73963218-ce5b-4813-8224-27ad7b69d0b3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446598 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.446609 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.457233 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2241b679-a172-4455-8fed-c31014efe301" (UID: "2241b679-a172-4455-8fed-c31014efe301"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.486978 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.495791 4879 scope.go:117] "RemoveContainer" containerID="bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.496344 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17\": container with ID starting with bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17 not found: ID does not exist" containerID="bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.496396 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17"} err="failed to get container status \"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17\": rpc error: code = NotFound desc = could not find container \"bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17\": container with ID starting with bf05d791fb19e28ca9b94bce96e6d91aad4fba1871ded8c2fe0a55f4c9d6ba17 not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.496429 4879 scope.go:117] "RemoveContainer" containerID="3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.496920 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43\": container with ID starting with 3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43 not found: ID does not exist" containerID="3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.496948 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43"} err="failed to get container status \"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43\": rpc error: code = NotFound desc = could not find container \"3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43\": container with ID starting with 3af54c46c1f9cbd3c61880e1fd4941b00ac2ca85b89326c331d58a14fc499b43 not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.496963 4879 scope.go:117] "RemoveContainer" containerID="9c3fd5bfe1c4a686adda7380350f15818f581370513fbd5cb85f18e67e6f730e" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.537148 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.545821 4879 scope.go:117] "RemoveContainer" containerID="1b6bde64e67dfaaeb2ea7d1af1b7e358481fefd8e638d1c70034361c14cd5032" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.552618 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2241b679-a172-4455-8fed-c31014efe301-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.553488 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.604617 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.615442 4879 scope.go:117] "RemoveContainer" containerID="af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.659357 4879 scope.go:117] "RemoveContainer" containerID="beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661079 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data\") pod \"ee22f7f3-e4e8-4166-87bd-ca7833654410\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661238 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd594\" (UniqueName: \"kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594\") pod \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661281 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffzpw\" (UniqueName: \"kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw\") pod \"ee22f7f3-e4e8-4166-87bd-ca7833654410\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661308 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle\") pod \"ee22f7f3-e4e8-4166-87bd-ca7833654410\" (UID: \"ee22f7f3-e4e8-4166-87bd-ca7833654410\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661353 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts\") pod \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\" (UID: \"a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661393 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts\") pod \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.661445 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5bq5\" (UniqueName: \"kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5\") pod \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\" (UID: \"ea2d8cdd-6c47-4cf7-b336-933762d2c445\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.665927 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw" (OuterVolumeSpecName: "kube-api-access-ffzpw") pod "ee22f7f3-e4e8-4166-87bd-ca7833654410" (UID: "ee22f7f3-e4e8-4166-87bd-ca7833654410"). 
InnerVolumeSpecName "kube-api-access-ffzpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.667255 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5" (OuterVolumeSpecName: "kube-api-access-w5bq5") pod "ea2d8cdd-6c47-4cf7-b336-933762d2c445" (UID: "ea2d8cdd-6c47-4cf7-b336-933762d2c445"). InnerVolumeSpecName "kube-api-access-w5bq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.667496 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" (UID: "a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.667887 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea2d8cdd-6c47-4cf7-b336-933762d2c445" (UID: "ea2d8cdd-6c47-4cf7-b336-933762d2c445"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.672239 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594" (OuterVolumeSpecName: "kube-api-access-vd594") pod "a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" (UID: "a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc"). InnerVolumeSpecName "kube-api-access-vd594". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.698911 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.718418 4879 scope.go:117] "RemoveContainer" containerID="af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.724980 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed\": container with ID starting with af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed not found: ID does not exist" containerID="af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.725032 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed"} err="failed to get container status \"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed\": rpc error: code = NotFound desc = could not find container \"af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed\": container with ID starting with af79a9b5f5a2f465dd7a69ee0eaac6f5f5d3818900d24404509069e4595574ed not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.725058 4879 scope.go:117] "RemoveContainer" containerID="beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.726747 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343\": container with ID starting with beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343 not found: ID does not exist" containerID="beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.726768 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343"} err="failed to get container status \"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343\": rpc error: code = NotFound desc = could not find container \"beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343\": container with ID starting with beba9c137a5016cf8d0dfc3f0e9e2572faf487210282f8a6baa81649a01e0343 not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.726781 4879 scope.go:117] "RemoveContainer" containerID="d1174852e93b7c0b41154253368f50c3ee1e1d604b6b7130605e291d51af060e" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.728349 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee22f7f3-e4e8-4166-87bd-ca7833654410" (UID: "ee22f7f3-e4e8-4166-87bd-ca7833654410"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.735150 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.753208 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data" (OuterVolumeSpecName: "config-data") pod "ee22f7f3-e4e8-4166-87bd-ca7833654410" (UID: "ee22f7f3-e4e8-4166-87bd-ca7833654410"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.763757 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts\") pod \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.763810 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8z7h\" (UniqueName: \"kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h\") pod \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\" (UID: \"8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8\") " Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764228 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd594\" (UniqueName: \"kubernetes.io/projected/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-kube-api-access-vd594\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764248 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffzpw\" (UniqueName: \"kubernetes.io/projected/ee22f7f3-e4e8-4166-87bd-ca7833654410-kube-api-access-ffzpw\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764260 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764271 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764282 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea2d8cdd-6c47-4cf7-b336-933762d2c445-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764292 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5bq5\" (UniqueName: \"kubernetes.io/projected/ea2d8cdd-6c47-4cf7-b336-933762d2c445-kube-api-access-w5bq5\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.764302 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee22f7f3-e4e8-4166-87bd-ca7833654410-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.764372 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.764413 4879 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:23.764400624 +0000 UTC m=+1755.367813695 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.765062 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" (UID: "8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.775333 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h" (OuterVolumeSpecName: "kube-api-access-z8z7h") pod "8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" (UID: "8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8"). InnerVolumeSpecName "kube-api-access-z8z7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.817460 4879 scope.go:117] "RemoveContainer" containerID="6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.870761 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.870794 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8z7h\" (UniqueName: \"kubernetes.io/projected/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8-kube-api-access-z8z7h\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.873009 4879 scope.go:117] "RemoveContainer" containerID="99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.912203 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystonee239-account-delete-47sbh"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.931404 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystonee239-account-delete-47sbh"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.946481 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.953026 4879 scope.go:117] "RemoveContainer" containerID="6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.954033 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc\": container with ID starting with 6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc not found: ID does not exist" containerID="6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc" Nov 25 
14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.954070 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc"} err="failed to get container status \"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc\": rpc error: code = NotFound desc = could not find container \"6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc\": container with ID starting with 6ca85b6f63741effab2c6191911791b5a1ab619d95a334fce9151422827059bc not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.954095 4879 scope.go:117] "RemoveContainer" containerID="99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750" Nov 25 14:54:22 crc kubenswrapper[4879]: E1125 14:54:22.954768 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750\": container with ID starting with 99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750 not found: ID does not exist" containerID="99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.954826 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750"} err="failed to get container status \"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750\": rpc error: code = NotFound desc = could not find container \"99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750\": container with ID starting with 99196bf9aaccb996867a955a13713cee590f73beabdc84043f29f8ae974b1750 not found: ID does not exist" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.954863 4879 scope.go:117] "RemoveContainer" containerID="a140b33b646cb8bebf32e5c01b72ae0d72d960de2557e0cf5cf92d5509342139" Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.956042 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.966007 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.974683 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 14:54:22 crc kubenswrapper[4879]: I1125 14:54:22.987798 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:22.999898 4879 scope.go:117] "RemoveContainer" containerID="0085d2813cd8a19fc5a40a396d0d21eeddfa493fb910f17688db09fd0fd10a5e" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.000093 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.010654 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.019848 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.074792 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ff8z\" (UniqueName: \"kubernetes.io/projected/d1296d0e-7f82-4550-ad62-9f3411aea36c-kube-api-access-9ff8z\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 
crc kubenswrapper[4879]: I1125 14:54:23.074827 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1296d0e-7f82-4550-ad62-9f3411aea36c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.088450 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.125290 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.144271 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.155046 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7b6ffb6c8b-jdb4h"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.164175 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.172997 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-6c8985d949-st5vd"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.195529 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-94fd8ccd4-vp796"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.197214 4879 generic.go:334] "Generic (PLEG): container finished" podID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerID="9d10b0eee6c5fd073f93c286eefe0e8c01139b9655565d8692ef2eddad0fce94" exitCode=0 Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.197385 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerDied","Data":"9d10b0eee6c5fd073f93c286eefe0e8c01139b9655565d8692ef2eddad0fce94"} Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.202736 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-94fd8ccd4-vp796"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.208432 4879 generic.go:334] "Generic (PLEG): container finished" podID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerID="95617f394d3d7080ab4131a8c7c59dc4d2711ba5f40712fcaa511fe4dd6ad911" exitCode=0 Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.208496 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerDied","Data":"95617f394d3d7080ab4131a8c7c59dc4d2711ba5f40712fcaa511fe4dd6ad911"} Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.211321 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.215208 4879 generic.go:334] "Generic (PLEG): container finished" podID="df7b7503-cc0d-48fe-be8a-75f2362edebf" containerID="c9cf3523b0a1a1aafb8fd8490358b7f25d0f8537ee1e95391e205a2a8c57be8d" exitCode=0 Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.215288 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-77b6b86f95-nr5cz" event={"ID":"df7b7503-cc0d-48fe-be8a-75f2362edebf","Type":"ContainerDied","Data":"c9cf3523b0a1a1aafb8fd8490358b7f25d0f8537ee1e95391e205a2a8c57be8d"} Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.218240 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder6256-account-delete-txbhm" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.219332 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement30f0-account-delete-s2wtk" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.219371 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.219764 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.219804 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican97a3-account-delete-gdmqs" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.222298 4879 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/neutronb847-account-delete-qvsn6" secret="" err="secret \"galera-openstack-dockercfg-fks6b\" not found" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.226082 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.236463 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.245021 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.258753 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.268671 4879 scope.go:117] "RemoveContainer" containerID="1093ba44c36e637d70c8ff92badde094bc3eaebb6c3b1cca2915b1d61de7025a" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.275907 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.288624 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.304021 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.319479 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.324312 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.333657 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.349323 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.373368 4879 scope.go:117] "RemoveContainer" containerID="cbadc53e85256e4bfdbe1b1d3b2ce2bbe19cc469e3ffc3a3cb5d56a035939efe" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.374849 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.380104 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.410436 4879 scope.go:117] "RemoveContainer" containerID="10a617444ae16388f07c8ef9802441bb3504b64919ce0e9b060337efd73ed89a" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.436201 4879 scope.go:117] "RemoveContainer" containerID="0b47fab24bee0e38de52810e8aac9e447edcc833a507ca4dc0bc3be5ed22829d" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.463457 4879 scope.go:117] "RemoveContainer" containerID="946e3d89d413078837a3446d11985d445eb85d6f25cc051bdb967850d1056a98" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.484877 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.484911 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.484956 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.484982 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.485003 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.485031 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.485069 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdgs9\" (UniqueName: \"kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: 
\"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.485170 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys\") pod \"df7b7503-cc0d-48fe-be8a-75f2362edebf\" (UID: \"df7b7503-cc0d-48fe-be8a-75f2362edebf\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.492255 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.493093 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.496266 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.498313 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9" (OuterVolumeSpecName: "kube-api-access-zdgs9") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "kube-api-access-zdgs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.504928 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts" (OuterVolumeSpecName: "scripts") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.519671 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.521860 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data" (OuterVolumeSpecName: "config-data") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.552277 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). 
InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.570489 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "df7b7503-cc0d-48fe-be8a-75f2362edebf" (UID: "df7b7503-cc0d-48fe-be8a-75f2362edebf"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586155 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d82p4\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586261 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586298 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586320 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586372 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586408 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586457 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586525 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586629 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586676 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.586715 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf\") pod \"c1814b22-d1b3-4426-9fa2-f613640f63e8\" (UID: \"c1814b22-d1b3-4426-9fa2-f613640f63e8\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587096 4879 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587117 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587162 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587172 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587180 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587188 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587197 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df7b7503-cc0d-48fe-be8a-75f2362edebf-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.587205 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdgs9\" (UniqueName: \"kubernetes.io/projected/df7b7503-cc0d-48fe-be8a-75f2362edebf-kube-api-access-zdgs9\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.588339 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.588735 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.588741 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.602987 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info" (OuterVolumeSpecName: "pod-info") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.606220 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.607889 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-7q792"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.608187 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.609037 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4" (OuterVolumeSpecName: "kube-api-access-d82p4") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "kube-api-access-d82p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.615117 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-7q792"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.627424 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.645749 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data" (OuterVolumeSpecName: "config-data") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.664378 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf" (OuterVolumeSpecName: "server-conf") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.684908 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" path="/var/lib/kubelet/pods/0d3273dc-a6fa-43b7-8225-7a175f55da77/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.687736 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" path="/var/lib/kubelet/pods/1cc2c187-456f-439a-a4b2-33dda7946308/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.692735 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2241b679-a172-4455-8fed-c31014efe301" path="/var/lib/kubelet/pods/2241b679-a172-4455-8fed-c31014efe301/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.694069 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" path="/var/lib/kubelet/pods/4835e527-a539-4cc7-8730-d75f0c5af849/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695461 4879 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1814b22-d1b3-4426-9fa2-f613640f63e8-pod-info\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695502 4879 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1814b22-d1b3-4426-9fa2-f613640f63e8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695519 4879 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695532 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d82p4\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-kube-api-access-d82p4\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695544 4879 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-server-conf\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695571 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 25 14:54:23 crc 
kubenswrapper[4879]: I1125 14:54:23.695584 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695596 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695608 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1814b22-d1b3-4426-9fa2-f613640f63e8-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.695621 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.697645 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" path="/var/lib/kubelet/pods/552e169f-1bf1-4d0b-802a-da9720c6a35d/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.698506 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" path="/var/lib/kubelet/pods/56f930a5-3344-4b7f-90d4-10a4b758e740/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.699497 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.700984 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.701553 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57c000a8-9862-4518-87aa-d818a118973c" path="/var/lib/kubelet/pods/57c000a8-9862-4518-87aa-d818a118973c/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.701568 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.701598 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" 
probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.702140 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" path="/var/lib/kubelet/pods/6391ba8e-71b4-44d3-8a99-14ff66c61604/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.702734 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" path="/var/lib/kubelet/pods/6b33fd48-a5ae-4916-93f5-0675f1cc8bca/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.702838 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.705502 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.709335 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.709391 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.709657 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" path="/var/lib/kubelet/pods/73963218-ce5b-4813-8224-27ad7b69d0b3/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.712689 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" path="/var/lib/kubelet/pods/a19706bc-9684-4f70-a0e8-9108014cac2f/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.720637 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" path="/var/lib/kubelet/pods/a901e0fb-8403-4e8d-a1b1-b3ccae942552/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.725349 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" path="/var/lib/kubelet/pods/cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.729778 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1296d0e-7f82-4550-ad62-9f3411aea36c" path="/var/lib/kubelet/pods/d1296d0e-7f82-4550-ad62-9f3411aea36c/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.734553 4879 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" path="/var/lib/kubelet/pods/d41b8b48-a0d1-4f8c-b8e8-96eee57a9743/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.737400 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.741606 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c1814b22-d1b3-4426-9fa2-f613640f63e8" (UID: "c1814b22-d1b3-4426-9fa2-f613640f63e8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.759781 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.783302 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f8f130d5-685b-4b37-89bb-b9536109c8fb/ovn-northd/0.log" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.783378 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.785401 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dab1f06c-c032-4e72-8d6b-313eb41c893e" path="/var/lib/kubelet/pods/dab1f06c-c032-4e72-8d6b-313eb41c893e/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.786519 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" path="/var/lib/kubelet/pods/dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.801022 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" path="/var/lib/kubelet/pods/dd903399-aa23-4f0d-93fc-4c7a5f454750/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.802316 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.802371 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1814b22-d1b3-4426-9fa2-f613640f63e8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.804203 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:23 crc kubenswrapper[4879]: E1125 14:54:23.804280 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:25.804257971 +0000 UTC m=+1757.407671042 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.805619 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" path="/var/lib/kubelet/pods/e81331ae-5592-4d18-8116-ef1ef9520145/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.806770 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" path="/var/lib/kubelet/pods/ee22f7f3-e4e8-4166-87bd-ca7833654410/volumes" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.807890 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement30f0-account-delete-s2wtk"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.807919 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-30f0-account-create-7k4s4"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.807936 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement30f0-account-delete-s2wtk"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.809082 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-30f0-account-create-7k4s4"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.832760 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-hnsgr"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.842897 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-hnsgr"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.859761 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-97a3-account-create-l8gnv"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.870939 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican97a3-account-delete-gdmqs"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905088 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905159 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905199 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905232 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc 
kubenswrapper[4879]: I1125 14:54:23.905272 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905305 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905327 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905367 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905389 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905421 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905474 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzsjr\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905508 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905533 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905573 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc 
kubenswrapper[4879]: I1125 14:54:23.905600 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtrst\" (UniqueName: \"kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905639 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins\") pod \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\" (UID: \"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905720 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts\") pod \"f8f130d5-685b-4b37-89bb-b9536109c8fb\" (UID: \"f8f130d5-685b-4b37-89bb-b9536109c8fb\") " Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.905771 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.906100 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.906604 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts" (OuterVolumeSpecName: "scripts") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.910824 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-97a3-account-create-l8gnv"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.911300 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.918541 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.918999 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.919484 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.920082 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr" (OuterVolumeSpecName: "kube-api-access-qzsjr") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "kube-api-access-qzsjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.920572 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config" (OuterVolumeSpecName: "config") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.920791 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst" (OuterVolumeSpecName: "kube-api-access-vtrst") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "kube-api-access-vtrst". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.921116 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.921622 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.937437 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info" (OuterVolumeSpecName: "pod-info") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.943787 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data" (OuterVolumeSpecName: "config-data") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.943863 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican97a3-account-delete-gdmqs"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.959968 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-vlf6s"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.966711 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-vlf6s"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.968942 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.977108 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4d84-account-create-2z2fh"] Nov 25 14:54:23 crc kubenswrapper[4879]: I1125 14:54:23.994519 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance4d84-account-delete-2z9mh"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007557 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzsjr\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-kube-api-access-qzsjr\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007593 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007608 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007618 4879 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007629 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtrst\" (UniqueName: \"kubernetes.io/projected/f8f130d5-685b-4b37-89bb-b9536109c8fb-kube-api-access-vtrst\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007639 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007649 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007660 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007671 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007698 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007710 4879 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007720 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8f130d5-685b-4b37-89bb-b9536109c8fb-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.007730 4879 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-pod-info\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.011057 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4d84-account-create-2z2fh"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.021527 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance4d84-account-delete-2z9mh"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.032610 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lrtc8"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.043287 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lrtc8"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.050908 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-6256-account-create-kl4pl"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.059255 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.061531 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder6256-account-delete-txbhm"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.062850 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.070142 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-6256-account-create-kl4pl"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.080774 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder6256-account-delete-txbhm"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.087716 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-49whm"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.094397 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f8f130d5-685b-4b37-89bb-b9536109c8fb" (UID: "f8f130d5-685b-4b37-89bb-b9536109c8fb"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.095250 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf" (OuterVolumeSpecName: "server-conf") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.096959 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-49whm"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.112806 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.112835 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.112845 4879 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-server-conf\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.112857 4879 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f130d5-685b-4b37-89bb-b9536109c8fb-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.113251 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b847-account-create-kqzp9"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.117208 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" (UID: "ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.120644 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.126618 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b847-account-create-kqzp9"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.214646 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.230865 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.230856 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd","Type":"ContainerDied","Data":"225dff5803586506635b3f1bc8c0282492f6e545c69a15f6d1e3cbb56e09f252"} Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.231000 4879 scope.go:117] "RemoveContainer" containerID="9d10b0eee6c5fd073f93c286eefe0e8c01139b9655565d8692ef2eddad0fce94" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.233569 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_f8f130d5-685b-4b37-89bb-b9536109c8fb/ovn-northd/0.log" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.233625 4879 generic.go:334] "Generic (PLEG): container finished" podID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" exitCode=139 Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.233688 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerDied","Data":"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c"} Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.233723 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"f8f130d5-685b-4b37-89bb-b9536109c8fb","Type":"ContainerDied","Data":"d3722bd731d541dba0d192346e66860aa03ec2135d10ce36b86d37c8da54bd50"} Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.233791 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.241445 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1814b22-d1b3-4426-9fa2-f613640f63e8","Type":"ContainerDied","Data":"7375a763183441cc5834f0025c23e8d37def2d89a69108d3818d2fe7ff7e8c84"} Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.241571 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.244418 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-77b6b86f95-nr5cz" event={"ID":"df7b7503-cc0d-48fe-be8a-75f2362edebf","Type":"ContainerDied","Data":"811f99f745dacfd092b7a6dca49b7790a0cc6b88c2f53f79491b0f931b893e7b"} Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.244922 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-77b6b86f95-nr5cz" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.252408 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutronb847-account-delete-qvsn6" podUID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" containerName="mariadb-account-delete" containerID="cri-o://ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4" gracePeriod=30 Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.259368 4879 scope.go:117] "RemoveContainer" containerID="a476d9d71c0fa760bf1e708c942cc87f51d050d16da32ce40f524f553587e4f7" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.275207 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.285671 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.290998 4879 scope.go:117] "RemoveContainer" containerID="d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.295205 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.303321 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-77b6b86f95-nr5cz"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.314081 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.323555 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.328262 4879 scope.go:117] "RemoveContainer" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.332758 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.338076 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.411447 4879 scope.go:117] "RemoveContainer" containerID="d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd" Nov 25 14:54:24 crc kubenswrapper[4879]: E1125 14:54:24.411864 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd\": container with ID starting with d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd not found: ID does not exist" containerID="d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.411893 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd"} err="failed to get container status \"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd\": rpc error: code = NotFound desc = could not find container \"d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd\": container with ID starting with d1aa44003a948d7df77ec034c9b6900125d269544d10245b1436c30e554c18cd not found: ID does not exist" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.411919 4879 scope.go:117] 
"RemoveContainer" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" Nov 25 14:54:24 crc kubenswrapper[4879]: E1125 14:54:24.412344 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c\": container with ID starting with 6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c not found: ID does not exist" containerID="6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.412379 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c"} err="failed to get container status \"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c\": rpc error: code = NotFound desc = could not find container \"6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c\": container with ID starting with 6610818adf154eb92cd77bad7c1ee470817bb3f225ce5487e1faf47b33f16d6c not found: ID does not exist" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.412398 4879 scope.go:117] "RemoveContainer" containerID="95617f394d3d7080ab4131a8c7c59dc4d2711ba5f40712fcaa511fe4dd6ad911" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.459404 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.204:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.464161 4879 scope.go:117] "RemoveContainer" containerID="cf474471d6f23adf5b05a9b2ace8992e9dc0e24725d55440de35a374bae09a8a" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.502766 4879 scope.go:117] "RemoveContainer" containerID="c9cf3523b0a1a1aafb8fd8490358b7f25d0f8537ee1e95391e205a2a8c57be8d" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.732462 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.806676 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.809870 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6c8985d949-st5vd" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.159:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.810085 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6c8985d949-st5vd" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.159:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.829524 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle\") pod \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.829856 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data\") pod \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.829893 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkmmd\" (UniqueName: \"kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd\") pod \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\" (UID: \"fcfac5ba-5544-4d76-af22-0c8b6b9028a7\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.853370 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd" (OuterVolumeSpecName: "kube-api-access-xkmmd") pod "fcfac5ba-5544-4d76-af22-0c8b6b9028a7" (UID: "fcfac5ba-5544-4d76-af22-0c8b6b9028a7"). InnerVolumeSpecName "kube-api-access-xkmmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.863441 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcfac5ba-5544-4d76-af22-0c8b6b9028a7" (UID: "fcfac5ba-5544-4d76-af22-0c8b6b9028a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.869009 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data" (OuterVolumeSpecName: "config-data") pod "fcfac5ba-5544-4d76-af22-0c8b6b9028a7" (UID: "fcfac5ba-5544-4d76-af22-0c8b6b9028a7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931732 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931802 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931841 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931868 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931913 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931932 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931955 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szfr8\" (UniqueName: \"kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.931998 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts\") pod \"5f98073f-daa8-4796-955e-2f7d767d9125\" (UID: \"5f98073f-daa8-4796-955e-2f7d767d9125\") " Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.932287 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.932303 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.932312 4879 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-xkmmd\" (UniqueName: \"kubernetes.io/projected/fcfac5ba-5544-4d76-af22-0c8b6b9028a7-kube-api-access-xkmmd\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.933003 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.933420 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.933928 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.934224 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.936876 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8" (OuterVolumeSpecName: "kube-api-access-szfr8") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "kube-api-access-szfr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.942262 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.955241 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:24 crc kubenswrapper[4879]: I1125 14:54:24.972624 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "5f98073f-daa8-4796-955e-2f7d767d9125" (UID: "5f98073f-daa8-4796-955e-2f7d767d9125"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.033986 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034028 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szfr8\" (UniqueName: \"kubernetes.io/projected/5f98073f-daa8-4796-955e-2f7d767d9125-kube-api-access-szfr8\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034043 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034090 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034101 4879 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5f98073f-daa8-4796-955e-2f7d767d9125-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034108 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034117 4879 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98073f-daa8-4796-955e-2f7d767d9125-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.034150 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5f98073f-daa8-4796-955e-2f7d767d9125-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.050513 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.135113 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.266361 4879 generic.go:334] "Generic (PLEG): container finished" podID="5f98073f-daa8-4796-955e-2f7d767d9125" containerID="9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e" exitCode=0 Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.266464 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.269953 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerDied","Data":"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e"} Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.270050 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5f98073f-daa8-4796-955e-2f7d767d9125","Type":"ContainerDied","Data":"6d935c42e3280de3e0e1d4bfa0905c43bbbc329996e9a3ab0e58aad6a700bb17"} Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.270081 4879 scope.go:117] "RemoveContainer" containerID="9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.272684 4879 generic.go:334] "Generic (PLEG): container finished" podID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" exitCode=0 Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.272773 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fcfac5ba-5544-4d76-af22-0c8b6b9028a7","Type":"ContainerDied","Data":"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2"} Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.272822 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fcfac5ba-5544-4d76-af22-0c8b6b9028a7","Type":"ContainerDied","Data":"0cc2c5bcd6a1005db1627d69760ce7fdbcbfda7164518d2bc0452fefda4d93db"} Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.273926 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.321263 4879 scope.go:117] "RemoveContainer" containerID="72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.325178 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.337961 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.343983 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.349786 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.352723 4879 scope.go:117] "RemoveContainer" containerID="9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e" Nov 25 14:54:25 crc kubenswrapper[4879]: E1125 14:54:25.353228 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e\": container with ID starting with 9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e not found: ID does not exist" containerID="9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.353258 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e"} err="failed to get container status \"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e\": rpc error: code = NotFound desc = could not find container \"9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e\": container with ID starting with 9604f83f5ce7f84ceec446ae0f43627eff0b39a97f04d465915c89b2f5e3962e not found: ID does not exist" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.353281 4879 scope.go:117] "RemoveContainer" containerID="72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57" Nov 25 14:54:25 crc kubenswrapper[4879]: E1125 14:54:25.353774 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57\": container with ID starting with 72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57 not found: ID does not exist" containerID="72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.353815 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57"} err="failed to get container status \"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57\": rpc error: code = NotFound desc = could not find container \"72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57\": container with ID starting with 72ade4f2ad4f0fc9ddce3ee15bcbfb0583fea94c3d214b67fe909c68d52f1a57 not found: ID does not exist" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.353842 4879 scope.go:117] "RemoveContainer" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" Nov 25 14:54:25 crc 
kubenswrapper[4879]: I1125 14:54:25.375319 4879 scope.go:117] "RemoveContainer" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" Nov 25 14:54:25 crc kubenswrapper[4879]: E1125 14:54:25.376046 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2\": container with ID starting with 384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2 not found: ID does not exist" containerID="384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.376104 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2"} err="failed to get container status \"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2\": rpc error: code = NotFound desc = could not find container \"384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2\": container with ID starting with 384e4ecd8b543f6999f676e991e54e7192e08b5e9768cfa3aa51a713c14586f2 not found: ID does not exist" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.391721 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerName="kube-state-metrics" probeResult="failure" output="Get \"https://10.217.0.190:8081/readyz\": dial tcp 10.217.0.190:8081: i/o timeout" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.398922 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.174:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.655330 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="032be742-1ad2-493b-aeda-974292114a3c" path="/var/lib/kubelet/pods/032be742-1ad2-493b-aeda-974292114a3c/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.656024 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40700243-955b-472a-8d05-4a3284152528" path="/var/lib/kubelet/pods/40700243-955b-472a-8d05-4a3284152528/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.656737 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45f445eb-09f2-4dcc-b4a1-2728239cd955" path="/var/lib/kubelet/pods/45f445eb-09f2-4dcc-b4a1-2728239cd955/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.658290 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" path="/var/lib/kubelet/pods/5f98073f-daa8-4796-955e-2f7d767d9125/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.658934 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7742abe4-cffe-4f79-b818-75a364174832" path="/var/lib/kubelet/pods/7742abe4-cffe-4f79-b818-75a364174832/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.659428 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7827b848-c65a-4cab-8768-a435040aed03" path="/var/lib/kubelet/pods/7827b848-c65a-4cab-8768-a435040aed03/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.660330 4879 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" path="/var/lib/kubelet/pods/8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.660904 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97317c8e-3668-45a4-84d5-a91b4a586e81" path="/var/lib/kubelet/pods/97317c8e-3668-45a4-84d5-a91b4a586e81/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.661449 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" path="/var/lib/kubelet/pods/a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.662836 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" path="/var/lib/kubelet/pods/ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.663723 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" path="/var/lib/kubelet/pods/c1814b22-d1b3-4426-9fa2-f613640f63e8/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.664574 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c36e8400-c985-4fb1-aab9-8cced923f7ed" path="/var/lib/kubelet/pods/c36e8400-c985-4fb1-aab9-8cced923f7ed/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.665824 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca53b222-8018-4445-aa86-5401dbc847b5" path="/var/lib/kubelet/pods/ca53b222-8018-4445-aa86-5401dbc847b5/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.666435 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3812c9b-a0f4-4c22-a569-f99b2e1ab610" path="/var/lib/kubelet/pods/d3812c9b-a0f4-4c22-a569-f99b2e1ab610/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.667608 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df7b7503-cc0d-48fe-be8a-75f2362edebf" path="/var/lib/kubelet/pods/df7b7503-cc0d-48fe-be8a-75f2362edebf/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.669286 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea2d8cdd-6c47-4cf7-b336-933762d2c445" path="/var/lib/kubelet/pods/ea2d8cdd-6c47-4cf7-b336-933762d2c445/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.670081 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3d76cb1-a568-45e0-ac32-903f49ffbe54" path="/var/lib/kubelet/pods/f3d76cb1-a568-45e0-ac32-903f49ffbe54/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.671106 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" path="/var/lib/kubelet/pods/f8f130d5-685b-4b37-89bb-b9536109c8fb/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: I1125 14:54:25.673006 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" path="/var/lib/kubelet/pods/fcfac5ba-5544-4d76-af22-0c8b6b9028a7/volumes" Nov 25 14:54:25 crc kubenswrapper[4879]: E1125 14:54:25.845000 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:25 crc kubenswrapper[4879]: E1125 14:54:25.845070 4879 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:29.845054987 +0000 UTC m=+1761.448468058 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.308071 4879 generic.go:334] "Generic (PLEG): container finished" podID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerID="28f1ee62e8f0217666143217990ac5ddfaf70ba0a32edec6f7c649da3cf673b3" exitCode=0 Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.308493 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerDied","Data":"28f1ee62e8f0217666143217990ac5ddfaf70ba0a32edec6f7c649da3cf673b3"} Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.371890 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.469624 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.469800 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.469883 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.469947 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.470023 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.470050 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.470167 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-xhnpj\" (UniqueName: \"kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj\") pod \"36963cc9-ce9a-4f42-81ac-1a5afde50592\" (UID: \"36963cc9-ce9a-4f42-81ac-1a5afde50592\") " Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.475901 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.476908 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj" (OuterVolumeSpecName: "kube-api-access-xhnpj") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "kube-api-access-xhnpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.514591 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config" (OuterVolumeSpecName: "config") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.514797 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.516005 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.525317 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.536417 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "36963cc9-ce9a-4f42-81ac-1a5afde50592" (UID: "36963cc9-ce9a-4f42-81ac-1a5afde50592"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.572606 4879 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.572878 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.572944 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhnpj\" (UniqueName: \"kubernetes.io/projected/36963cc9-ce9a-4f42-81ac-1a5afde50592-kube-api-access-xhnpj\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.573042 4879 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.573108 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.573182 4879 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:27 crc kubenswrapper[4879]: I1125 14:54:27.573239 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36963cc9-ce9a-4f42-81ac-1a5afde50592-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.320319 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-66d45fd54f-j9t95" event={"ID":"36963cc9-ce9a-4f42-81ac-1a5afde50592","Type":"ContainerDied","Data":"d2dd424392a1048a563080d38949b43ade371b2ac15e8e2b0e87586354fdebbc"} Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.320381 4879 scope.go:117] "RemoveContainer" containerID="05f0aa550451ea4ec5584e060eeaaaa5e3700f2bd21f70929ff15b059676284f" Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.320482 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-66d45fd54f-j9t95" Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.344509 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.346246 4879 scope.go:117] "RemoveContainer" containerID="28f1ee62e8f0217666143217990ac5ddfaf70ba0a32edec6f7c649da3cf673b3" Nov 25 14:54:28 crc kubenswrapper[4879]: I1125 14:54:28.367596 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-66d45fd54f-j9t95"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.698696 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.698960 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.699093 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.699756 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.699820 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.700709 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.701816 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:28 crc kubenswrapper[4879]: E1125 14:54:28.701895 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:29 crc kubenswrapper[4879]: I1125 14:54:29.653517 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" path="/var/lib/kubelet/pods/36963cc9-ce9a-4f42-81ac-1a5afde50592/volumes" Nov 25 14:54:29 crc kubenswrapper[4879]: E1125 14:54:29.913203 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:29 crc kubenswrapper[4879]: E1125 14:54:29.913290 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:37.913266674 +0000 UTC m=+1769.516679745 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:30 crc kubenswrapper[4879]: I1125 14:54:30.644165 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:54:30 crc kubenswrapper[4879]: E1125 14:54:30.644381 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.697718 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.698636 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.699241 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process 
not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.699401 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.700048 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.701439 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.704092 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:33 crc kubenswrapper[4879]: E1125 14:54:33.704181 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:37 crc kubenswrapper[4879]: E1125 14:54:37.941912 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:37 crc kubenswrapper[4879]: E1125 14:54:37.942544 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:54:53.942523023 +0000 UTC m=+1785.545936114 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.384112 4879 scope.go:117] "RemoveContainer" containerID="59cc78ec01c49083ae2ec8fb51182d8ba11ddab3a7333e816a2b50accaef8fdc" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.416272 4879 scope.go:117] "RemoveContainer" containerID="31672bdc9b2a12f616293fa7b4333480edcb9675623c5573e6a892c73ff26326" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.435935 4879 scope.go:117] "RemoveContainer" containerID="a7831c10e52dea800b5ac0b645f0ad3ee3a1384503fb815d37c65ab355043571" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.493846 4879 scope.go:117] "RemoveContainer" containerID="356c8b0d5d0a619eca770586e3ffc2155074fbe2adbe3a513de619e7a0a60203" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.530422 4879 scope.go:117] "RemoveContainer" containerID="12598c3acdb3699a42bbab7f4f6aec79e1cae59bae5358e126e6804e1b221dff" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.549956 4879 scope.go:117] "RemoveContainer" containerID="66d1cb297f19e2391f50e7a7f3d03631d216851e0f2b568c9abf77918731b2c4" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.579505 4879 scope.go:117] "RemoveContainer" containerID="a4898d7db1c1ab0d995ff0bade8718c92a4ed14360d6c8adef0afed06d89ace7" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.602702 4879 scope.go:117] "RemoveContainer" containerID="16a1925a0bab70bc1be829bb908cb1c888c4edac0592caffaf1453dc85a92d61" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.644949 4879 scope.go:117] "RemoveContainer" containerID="75f7f2f6ce0d7c84fb222485fd7de19899838f066a478987f69f8229457eaa90" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.667575 4879 scope.go:117] "RemoveContainer" containerID="73e8dd61029c94294aeb86b9c378ec7ea7b10d602ce332a8330879bb615f72bb" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.689179 4879 scope.go:117] "RemoveContainer" containerID="c5a3d32bd6664e58ca040b6cfc96b13c945dc2b2fc8f9feefcfd2798ec76fabb" Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.697899 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.698408 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.698632 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" 
containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.698661 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.699431 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.701429 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.702741 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:38 crc kubenswrapper[4879]: E1125 14:54:38.702827 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.721347 4879 scope.go:117] "RemoveContainer" containerID="01d98062e3558ed331fc75ea66b8e2865a9d6e32d2261a8a4e32c7c3cc6584de" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.745930 4879 scope.go:117] "RemoveContainer" containerID="55b2d5a565b513c07e018ca250204a029bcc3a613837014a192a04cf01b5b836" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.767268 4879 scope.go:117] "RemoveContainer" containerID="4abc7c5e3d05542a1b27d4a7019077e6d57ea395b9704e3ec3d81e73a38f0fb1" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.786664 4879 scope.go:117] "RemoveContainer" containerID="edf8e81a61cd702c70a2c3934e932de736641ae6fa5c76a23b60ee36e54d3b5f" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.805268 4879 scope.go:117] "RemoveContainer" containerID="dfa8795114b5f0a0a7289db1a6c75a7e18bf770c9753b6f30a6cc638d3462c39" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.827151 4879 scope.go:117] "RemoveContainer" containerID="350d4e7b8e9d631147fe9a755af14db63aa3e7ee234ef7375cd6205d968d2327" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.846674 4879 scope.go:117] "RemoveContainer" containerID="3e9070ec10259d4770e7bc3194ce1d9eb318aa6dfe8b7ad207427c483b3d55b8" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.876617 4879 scope.go:117] "RemoveContainer" 
containerID="8f250436d4a2c6eef379ef9363ed87b3aad3f00f51ff3d000d658c53000309df" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.910778 4879 scope.go:117] "RemoveContainer" containerID="c1e6e0ff9d243448044098d3232995e2d1750e73419d17539f646a12b915a82f" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.930904 4879 scope.go:117] "RemoveContainer" containerID="60a5a0e65585a2bc5eebda61c00e943a714eb25b99f7e1142ce1d52ab77390f4" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.950331 4879 scope.go:117] "RemoveContainer" containerID="0aa845f6803c9eb24a06233769b5289019ad7726632f483f0ee6f162d8982f30" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.972178 4879 scope.go:117] "RemoveContainer" containerID="c67064a374f4c5f4abf06b24674bf19e46cdaefe83ab057e721853f6b7b3b0a0" Nov 25 14:54:38 crc kubenswrapper[4879]: I1125 14:54:38.993316 4879 scope.go:117] "RemoveContainer" containerID="debe7798b9436c2fd4e85e9d3be0ad032cd6fa97b197b874840d667896089e8a" Nov 25 14:54:42 crc kubenswrapper[4879]: I1125 14:54:42.644699 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:54:42 crc kubenswrapper[4879]: E1125 14:54:42.645488 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.698212 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.699308 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.699396 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.699907 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.699944 4879 
prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.701234 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.703423 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 25 14:54:43 crc kubenswrapper[4879]: E1125 14:54:43.703753 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-2n7ff" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.406748 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2n7ff_a98226c2-37b7-46b6-ba95-ad7fb26e2402/ovs-vswitchd/0.log" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.407813 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.477837 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2n7ff_a98226c2-37b7-46b6-ba95-ad7fb26e2402/ovs-vswitchd/0.log" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.478510 4879 generic.go:334] "Generic (PLEG): container finished" podID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" exitCode=137 Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.478593 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerDied","Data":"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b"} Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.478619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2n7ff" event={"ID":"a98226c2-37b7-46b6-ba95-ad7fb26e2402","Type":"ContainerDied","Data":"850b5152ff08503da0ba810245f0c46534854023d505e357b1d2bd961a9588a8"} Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.478634 4879 scope.go:117] "RemoveContainer" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.478743 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-2n7ff" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.488013 4879 generic.go:334] "Generic (PLEG): container finished" podID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerID="1a3386a1d224fe22edaf0215700f7f28f92829314b7558a91a246f504e5ef884" exitCode=137 Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.488054 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"1a3386a1d224fe22edaf0215700f7f28f92829314b7558a91a246f504e5ef884"} Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550521 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dt56\" (UniqueName: \"kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550616 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550643 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550665 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run" (OuterVolumeSpecName: "var-run") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550693 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550737 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib" (OuterVolumeSpecName: "var-lib") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550757 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log" (OuterVolumeSpecName: "var-log") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550795 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.550901 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs\") pod \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\" (UID: \"a98226c2-37b7-46b6-ba95-ad7fb26e2402\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551000 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551776 4879 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-run\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551935 4879 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-log\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551951 4879 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-var-lib\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551858 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts" (OuterVolumeSpecName: "scripts") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.551961 4879 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/a98226c2-37b7-46b6-ba95-ad7fb26e2402-etc-ovs\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.555794 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56" (OuterVolumeSpecName: "kube-api-access-5dt56") pod "a98226c2-37b7-46b6-ba95-ad7fb26e2402" (UID: "a98226c2-37b7-46b6-ba95-ad7fb26e2402"). InnerVolumeSpecName "kube-api-access-5dt56". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.558092 4879 scope.go:117] "RemoveContainer" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.595246 4879 scope.go:117] "RemoveContainer" containerID="bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.622335 4879 scope.go:117] "RemoveContainer" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" Nov 25 14:54:45 crc kubenswrapper[4879]: E1125 14:54:45.622880 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b\": container with ID starting with 8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b not found: ID does not exist" containerID="8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.622929 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b"} err="failed to get container status \"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b\": rpc error: code = NotFound desc = could not find container \"8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b\": container with ID starting with 8d0d2fa04abe47e2adf25c601569add949f3618181ff2d80325b501e73b5a25b not found: ID does not exist" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.622963 4879 scope.go:117] "RemoveContainer" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" Nov 25 14:54:45 crc kubenswrapper[4879]: E1125 14:54:45.623321 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060\": container with ID starting with 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 not found: ID does not exist" containerID="092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.623341 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060"} err="failed to get container status \"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060\": rpc error: code = NotFound desc = could not find container \"092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060\": container with ID starting with 092614f7c59a9f175b8ba8bb4d2dea77161e3a4b786f527873baec9312e02060 not found: ID does not exist" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.623355 4879 scope.go:117] "RemoveContainer" containerID="bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540" Nov 25 14:54:45 crc kubenswrapper[4879]: E1125 14:54:45.623804 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540\": container with ID starting with bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540 not found: ID does not exist" containerID="bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540" Nov 25 14:54:45 crc 
kubenswrapper[4879]: I1125 14:54:45.623842 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540"} err="failed to get container status \"bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540\": rpc error: code = NotFound desc = could not find container \"bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540\": container with ID starting with bf2e7f5fece51c4b0090afd3f15b44bfe7cad625ed87eb243384258510bde540 not found: ID does not exist" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.652653 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dt56\" (UniqueName: \"kubernetes.io/projected/a98226c2-37b7-46b6-ba95-ad7fb26e2402-kube-api-access-5dt56\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.652682 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a98226c2-37b7-46b6-ba95-ad7fb26e2402-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.802200 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.803013 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.808998 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-2n7ff"] Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.956782 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache\") pod \"734e5d8b-907c-4246-adca-6a05a98c0b27\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.956881 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock\") pod \"734e5d8b-907c-4246-adca-6a05a98c0b27\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.956965 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") pod \"734e5d8b-907c-4246-adca-6a05a98c0b27\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.956984 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jv8d\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d\") pod \"734e5d8b-907c-4246-adca-6a05a98c0b27\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.957021 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"734e5d8b-907c-4246-adca-6a05a98c0b27\" (UID: \"734e5d8b-907c-4246-adca-6a05a98c0b27\") " Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.957813 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache" (OuterVolumeSpecName: "cache") pod 
"734e5d8b-907c-4246-adca-6a05a98c0b27" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.958194 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock" (OuterVolumeSpecName: "lock") pod "734e5d8b-907c-4246-adca-6a05a98c0b27" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.961162 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "734e5d8b-907c-4246-adca-6a05a98c0b27" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.961322 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d" (OuterVolumeSpecName: "kube-api-access-8jv8d") pod "734e5d8b-907c-4246-adca-6a05a98c0b27" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27"). InnerVolumeSpecName "kube-api-access-8jv8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:45 crc kubenswrapper[4879]: I1125 14:54:45.964239 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "734e5d8b-907c-4246-adca-6a05a98c0b27" (UID: "734e5d8b-907c-4246-adca-6a05a98c0b27"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.058662 4879 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-lock\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.059011 4879 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.059027 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jv8d\" (UniqueName: \"kubernetes.io/projected/734e5d8b-907c-4246-adca-6a05a98c0b27-kube-api-access-8jv8d\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.059107 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.059142 4879 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/734e5d8b-907c-4246-adca-6a05a98c0b27-cache\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.075770 4879 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.159854 4879 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.504722 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"734e5d8b-907c-4246-adca-6a05a98c0b27","Type":"ContainerDied","Data":"fa4f882c8414a9031e52ed9ed8c1269005df3446fe4a953c62ec3f4473c2d804"} Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.504786 4879 scope.go:117] "RemoveContainer" containerID="1a3386a1d224fe22edaf0215700f7f28f92829314b7558a91a246f504e5ef884" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.504817 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.529637 4879 scope.go:117] "RemoveContainer" containerID="cd5d7ea0e9867e8b0fe2167eed20836d54166e4956798392a0fa624050ba2841" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.554204 4879 scope.go:117] "RemoveContainer" containerID="d24b61816a0df221f5cca68298192d65a0db4ffd65d2f4a3f373892fc2581637" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.563514 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.571395 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.578703 4879 scope.go:117] "RemoveContainer" containerID="9aa6cecc8f842351e35fda9d74c697191565ff3b682681037963b8761a8ddb66" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.596750 4879 scope.go:117] "RemoveContainer" containerID="44f30f9bf7a2177883d8a6f1b7f870687899a5c37317a986aa4bf85dbf743403" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.615301 4879 scope.go:117] "RemoveContainer" containerID="49bdf74c01b8d49e4758eb2d1f183fd8edba4323eaf30b7d8764ca72f601ca8e" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.634198 4879 scope.go:117] "RemoveContainer" containerID="5c9cb1ae5da818f9561819c0427fef18049f551233eaa6475166bbfe4e96a29e" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.653376 4879 scope.go:117] "RemoveContainer" containerID="78e26b5f462da8135c185dc405d7bb3e40a86f3a0a756b228397651aec46fca7" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.671787 4879 scope.go:117] "RemoveContainer" containerID="78e8c927e3ebbf38f18921232f416853c52caaa147f9bd6e0b42cb5c79ac392f" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.690555 4879 scope.go:117] "RemoveContainer" containerID="0e0569b112382ff911d6d5ffb10cba08cccdc02f2dc893c1ebc01b9c2863ce6b" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.710517 4879 scope.go:117] "RemoveContainer" containerID="f2156b4d210c38de5222f44394d8ff73450ee1af32aec0303948ee68935f943c" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.732160 4879 scope.go:117] "RemoveContainer" containerID="0f3579270e9a0136c7f68a5c3e04c11ba1d26b44c0c1ceb3b31b4cbca3cf4ba7" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.749716 4879 scope.go:117] "RemoveContainer" containerID="eef1d095ea350449cd4e4c13b9b72afe1590ad3b549d49103ddfa4d450adeab0" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.769437 4879 scope.go:117] "RemoveContainer" containerID="20f98654045b89872f5df2f364f5b15e9060829a694cc0678d30a79c4ecdb272" Nov 25 14:54:46 crc kubenswrapper[4879]: I1125 14:54:46.790010 4879 scope.go:117] "RemoveContainer" containerID="01d6163416959123eea9546db5e997dc58a1c8fb48cc8df296356f31a71cb2f2" Nov 25 14:54:47 crc kubenswrapper[4879]: I1125 14:54:47.657705 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" path="/var/lib/kubelet/pods/734e5d8b-907c-4246-adca-6a05a98c0b27/volumes" Nov 25 14:54:47 crc kubenswrapper[4879]: I1125 14:54:47.659453 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" path="/var/lib/kubelet/pods/a98226c2-37b7-46b6-ba95-ad7fb26e2402/volumes" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.564577 4879 generic.go:334] "Generic (PLEG): container finished" podID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" 
containerID="55880395db69d8e89d332ff4acd427f5ad61a8c22f2c6fc5803cf83bd5ee049c" exitCode=137 Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.564689 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi40fc-account-delete-2nm97" event={"ID":"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7","Type":"ContainerDied","Data":"55880395db69d8e89d332ff4acd427f5ad61a8c22f2c6fc5803cf83bd5ee049c"} Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.565116 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi40fc-account-delete-2nm97" event={"ID":"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7","Type":"ContainerDied","Data":"4bdd399320c939f3d2bdf59e0ed90fd94f80d881805fd845c0a6be5fefd851be"} Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.566319 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bdd399320c939f3d2bdf59e0ed90fd94f80d881805fd845c0a6be5fefd851be" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.566861 4879 generic.go:334] "Generic (PLEG): container finished" podID="e5944a1b-2616-48bd-9695-32641324e1c2" containerID="d0302612476ccf74c6889b66b2329d309c9a5c72dd0400e67fa30365f9b4ac12" exitCode=137 Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.566903 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0c4f1-account-delete-cfzvs" event={"ID":"e5944a1b-2616-48bd-9695-32641324e1c2","Type":"ContainerDied","Data":"d0302612476ccf74c6889b66b2329d309c9a5c72dd0400e67fa30365f9b4ac12"} Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.566925 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0c4f1-account-delete-cfzvs" event={"ID":"e5944a1b-2616-48bd-9695-32641324e1c2","Type":"ContainerDied","Data":"ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f"} Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.566941 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef5dfd67f0637a7d84997bc98f60705c4a91a04f1ccbc3ae4162012401f1df1f" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.586172 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.593463 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.749829 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts\") pod \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.749892 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vbbl\" (UniqueName: \"kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl\") pod \"e5944a1b-2616-48bd-9695-32641324e1c2\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.749989 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts\") pod \"e5944a1b-2616-48bd-9695-32641324e1c2\" (UID: \"e5944a1b-2616-48bd-9695-32641324e1c2\") " Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.750016 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx2kk\" (UniqueName: \"kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk\") pod \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\" (UID: \"3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7\") " Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.750925 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5944a1b-2616-48bd-9695-32641324e1c2" (UID: "e5944a1b-2616-48bd-9695-32641324e1c2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.750956 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" (UID: "3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.757094 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk" (OuterVolumeSpecName: "kube-api-access-fx2kk") pod "3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" (UID: "3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7"). InnerVolumeSpecName "kube-api-access-fx2kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.757321 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl" (OuterVolumeSpecName: "kube-api-access-9vbbl") pod "e5944a1b-2616-48bd-9695-32641324e1c2" (UID: "e5944a1b-2616-48bd-9695-32641324e1c2"). InnerVolumeSpecName "kube-api-access-9vbbl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.851495 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5944a1b-2616-48bd-9695-32641324e1c2-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.851535 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx2kk\" (UniqueName: \"kubernetes.io/projected/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-kube-api-access-fx2kk\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.851546 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:52 crc kubenswrapper[4879]: I1125 14:54:52.851558 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vbbl\" (UniqueName: \"kubernetes.io/projected/e5944a1b-2616-48bd-9695-32641324e1c2-kube-api-access-9vbbl\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.575189 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi40fc-account-delete-2nm97" Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.575249 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0c4f1-account-delete-cfzvs" Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.617940 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.625553 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi40fc-account-delete-2nm97"] Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.632076 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.638521 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0c4f1-account-delete-cfzvs"] Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.654745 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" path="/var/lib/kubelet/pods/3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7/volumes" Nov 25 14:54:53 crc kubenswrapper[4879]: I1125 14:54:53.655284 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5944a1b-2616-48bd-9695-32641324e1c2" path="/var/lib/kubelet/pods/e5944a1b-2616-48bd-9695-32641324e1c2/volumes" Nov 25 14:54:53 crc kubenswrapper[4879]: E1125 14:54:53.965790 4879 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 25 14:54:53 crc kubenswrapper[4879]: E1125 14:54:53.965874 4879 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts podName:4542a25b-82fa-419d-a6bb-8a2f653d88a1 nodeName:}" failed. No retries permitted until 2025-11-25 14:55:25.965853371 +0000 UTC m=+1817.569266442 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts") pod "neutronb847-account-delete-qvsn6" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1") : configmap "openstack-scripts" not found Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.568689 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.587320 4879 generic.go:334] "Generic (PLEG): container finished" podID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" containerID="ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4" exitCode=137 Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.587367 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb847-account-delete-qvsn6" event={"ID":"4542a25b-82fa-419d-a6bb-8a2f653d88a1","Type":"ContainerDied","Data":"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4"} Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.587398 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronb847-account-delete-qvsn6" event={"ID":"4542a25b-82fa-419d-a6bb-8a2f653d88a1","Type":"ContainerDied","Data":"60c1bb95fb667e1cfb8ef1c47bd60a33a7957edb94f58f6505db0fb2d7fd42c3"} Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.587401 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronb847-account-delete-qvsn6" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.587414 4879 scope.go:117] "RemoveContainer" containerID="ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.618102 4879 scope.go:117] "RemoveContainer" containerID="ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4" Nov 25 14:54:54 crc kubenswrapper[4879]: E1125 14:54:54.618596 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4\": container with ID starting with ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4 not found: ID does not exist" containerID="ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.618653 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4"} err="failed to get container status \"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4\": rpc error: code = NotFound desc = could not find container \"ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4\": container with ID starting with ed00ed22774f27bcb1c98b8d724bf5a40430ae8a16e661ac9c155223c361b2a4 not found: ID does not exist" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.674527 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts\") pod \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.674588 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9vc2\" (UniqueName: 
\"kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2\") pod \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\" (UID: \"4542a25b-82fa-419d-a6bb-8a2f653d88a1\") " Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.675402 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4542a25b-82fa-419d-a6bb-8a2f653d88a1" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.680421 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2" (OuterVolumeSpecName: "kube-api-access-w9vc2") pod "4542a25b-82fa-419d-a6bb-8a2f653d88a1" (UID: "4542a25b-82fa-419d-a6bb-8a2f653d88a1"). InnerVolumeSpecName "kube-api-access-w9vc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.776210 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4542a25b-82fa-419d-a6bb-8a2f653d88a1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.776252 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9vc2\" (UniqueName: \"kubernetes.io/projected/4542a25b-82fa-419d-a6bb-8a2f653d88a1-kube-api-access-w9vc2\") on node \"crc\" DevicePath \"\"" Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.922936 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:54 crc kubenswrapper[4879]: I1125 14:54:54.928831 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronb847-account-delete-qvsn6"] Nov 25 14:54:55 crc kubenswrapper[4879]: I1125 14:54:55.644464 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:54:55 crc kubenswrapper[4879]: E1125 14:54:55.644732 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:54:55 crc kubenswrapper[4879]: I1125 14:54:55.653257 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" path="/var/lib/kubelet/pods/4542a25b-82fa-419d-a6bb-8a2f653d88a1/volumes" Nov 25 14:55:09 crc kubenswrapper[4879]: I1125 14:55:09.655621 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:55:09 crc kubenswrapper[4879]: E1125 14:55:09.656312 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:55:22 crc kubenswrapper[4879]: I1125 14:55:22.644896 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:55:22 crc kubenswrapper[4879]: E1125 14:55:22.645885 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:55:35 crc kubenswrapper[4879]: I1125 14:55:35.645011 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:55:35 crc kubenswrapper[4879]: E1125 14:55:35.646636 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:55:39 crc kubenswrapper[4879]: I1125 14:55:39.607407 4879 scope.go:117] "RemoveContainer" containerID="91d3c832a6ed865c0a273e270acf54ffd2d958b4f067f40712a077d4845de570" Nov 25 14:55:39 crc kubenswrapper[4879]: I1125 14:55:39.644582 4879 scope.go:117] "RemoveContainer" containerID="4e612833ae0b26c1698789035216d1a5fc8ea3d9745279c425d6aa0e79629e25" Nov 25 14:55:39 crc kubenswrapper[4879]: I1125 14:55:39.673058 4879 scope.go:117] "RemoveContainer" containerID="d803fecf629b7d3ed726ab476ece719d4ac8a1643c196150048521897f49ce98" Nov 25 14:55:50 crc kubenswrapper[4879]: I1125 14:55:50.646397 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:55:50 crc kubenswrapper[4879]: E1125 14:55:50.647223 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:56:05 crc kubenswrapper[4879]: I1125 14:56:05.645319 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:56:05 crc kubenswrapper[4879]: E1125 14:56:05.647785 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:56:19 crc kubenswrapper[4879]: I1125 14:56:19.650428 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:56:19 crc kubenswrapper[4879]: E1125 14:56:19.651466 4879 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:56:30 crc kubenswrapper[4879]: I1125 14:56:30.646258 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:56:30 crc kubenswrapper[4879]: E1125 14:56:30.647105 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:56:39 crc kubenswrapper[4879]: I1125 14:56:39.843012 4879 scope.go:117] "RemoveContainer" containerID="3f48970c185dfaf1d95c8ae3db2de1e8b88ce02bf6cadd6e87e1ea0d966ae78b" Nov 25 14:56:39 crc kubenswrapper[4879]: I1125 14:56:39.867672 4879 scope.go:117] "RemoveContainer" containerID="467cda5a92e4a9568c185fb7b0dd1d0604375fe7911694bf33b41a4495f33f7c" Nov 25 14:56:39 crc kubenswrapper[4879]: I1125 14:56:39.887200 4879 scope.go:117] "RemoveContainer" containerID="12cb4bc8da4db9491df4437ab2a20d80c67926b82bd1b5a731c8081c2c5dfefc" Nov 25 14:56:39 crc kubenswrapper[4879]: I1125 14:56:39.915969 4879 scope.go:117] "RemoveContainer" containerID="1f411d7aa7397b9a94d385fe775d63a5aaf0a0e6eba3463194e645675a3ae6cf" Nov 25 14:56:42 crc kubenswrapper[4879]: I1125 14:56:42.644772 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:56:42 crc kubenswrapper[4879]: E1125 14:56:42.645378 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:56:54 crc kubenswrapper[4879]: I1125 14:56:54.645156 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:56:54 crc kubenswrapper[4879]: E1125 14:56:54.646003 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:57:08 crc kubenswrapper[4879]: I1125 14:57:08.644348 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:57:08 crc kubenswrapper[4879]: E1125 14:57:08.645030 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:57:22 crc kubenswrapper[4879]: I1125 14:57:22.644952 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:57:22 crc kubenswrapper[4879]: E1125 14:57:22.647236 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:57:37 crc kubenswrapper[4879]: I1125 14:57:37.647770 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:57:37 crc kubenswrapper[4879]: E1125 14:57:37.649705 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 14:57:39 crc kubenswrapper[4879]: I1125 14:57:39.969204 4879 scope.go:117] "RemoveContainer" containerID="63f4b205ecd54c9bcf210196a87df986e2472d90896a51e546fd5c05d2574473" Nov 25 14:57:39 crc kubenswrapper[4879]: I1125 14:57:39.990809 4879 scope.go:117] "RemoveContainer" containerID="fe799137727ba1eb4807a4cc49ddff7756508ae8f1751901fa9dc73f1198b961" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.011427 4879 scope.go:117] "RemoveContainer" containerID="4bbe35807a1a8ba04bc01121bc68265add08a5e4d443ccd4ad29d5ac1402a13e" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.035392 4879 scope.go:117] "RemoveContainer" containerID="93a7daf489e066419642d541945b2893cce9e5e066db82129cc2cfab9c02e43d" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.052472 4879 scope.go:117] "RemoveContainer" containerID="266a6f13e0258d2c83de2f1cada5a19bdc337665505445a2038543adf1aa1655" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.079312 4879 scope.go:117] "RemoveContainer" containerID="a457e76a3c320a65a07c2c4f3d622b70550393c726b7091f59077e4f3566579e" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.111710 4879 scope.go:117] "RemoveContainer" containerID="8b208ff4a68234006814d15aa7c752fd6ae2e91cfa74299335aaeedfc96da722" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.139617 4879 scope.go:117] "RemoveContainer" containerID="43d6def0377bafb29b5c10eb1df77b870221fb7bd30f2eaad8a2c95a040138a3" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.164401 4879 scope.go:117] "RemoveContainer" containerID="fbe88a6cb4b6ad2f2d6694834ebb4037cf9920df6190fa12bafcde301a5a22df" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.197978 4879 scope.go:117] "RemoveContainer" containerID="7d080f8359f9fea26f6bc4835f5bfefbc20ad3438d3ba6af33f49da9ea90d11f" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.221098 4879 scope.go:117] "RemoveContainer" 
containerID="dd9719475c4335a86036dc249bfd8e7b5fc7dc1a22a25d830de6783cc81e7d70" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.240011 4879 scope.go:117] "RemoveContainer" containerID="8bab8985c7010c6b6dd0f0feb2d6a0fbbc3be6f8716197ac4c3a366eb018ce99" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.261221 4879 scope.go:117] "RemoveContainer" containerID="84b6a2ca23202770c56d86c1035c27872867bfe1a3b79394726ddc3841987f01" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.279868 4879 scope.go:117] "RemoveContainer" containerID="3d6f5969e21c36bba7f2b94891e4cd1f3063118e9a14bb247b5fc957140227ea" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.321074 4879 scope.go:117] "RemoveContainer" containerID="a80540e1f8d0795a7d0f08a859ed3eccf014e5be89d1ab61902666a3115d369e" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.335550 4879 scope.go:117] "RemoveContainer" containerID="37f1273f7cf5c502d0c77a51a61d32202ae16d213bc42b5c8e8e7ee5c19bf9d5" Nov 25 14:57:40 crc kubenswrapper[4879]: I1125 14:57:40.350895 4879 scope.go:117] "RemoveContainer" containerID="9343436781b61bd3ebd9153984239f2fc0376be9027802ac40c4803a93fa4db4" Nov 25 14:57:52 crc kubenswrapper[4879]: I1125 14:57:52.645853 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 14:57:54 crc kubenswrapper[4879]: I1125 14:57:54.102662 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d"} Nov 25 14:58:40 crc kubenswrapper[4879]: I1125 14:58:40.506509 4879 scope.go:117] "RemoveContainer" containerID="34f47d04d59bb556c83ac90c7fd44769f03020a5c2534c1b206563b49dfba9f1" Nov 25 14:58:40 crc kubenswrapper[4879]: I1125 14:58:40.556670 4879 scope.go:117] "RemoveContainer" containerID="3026ef6362da67dd8812a17397d7367d13aad659ca6534125a24e47f36e7fe36" Nov 25 14:59:40 crc kubenswrapper[4879]: I1125 14:59:40.637721 4879 scope.go:117] "RemoveContainer" containerID="4b176e25b3d3736f8a1ed0025839a36198bbcdf4c98a15829d74dfff88a6fc04" Nov 25 14:59:40 crc kubenswrapper[4879]: I1125 14:59:40.676568 4879 scope.go:117] "RemoveContainer" containerID="e83dc9e66663fb6d1e40ab3bfc9715d51bf0cdac7da98606573a35f6b511db65" Nov 25 14:59:40 crc kubenswrapper[4879]: I1125 14:59:40.695424 4879 scope.go:117] "RemoveContainer" containerID="af2731b4880c2c108e4f64495391dcd175a79df8df0d6cf81c3c2064f645d782" Nov 25 14:59:40 crc kubenswrapper[4879]: I1125 14:59:40.719800 4879 scope.go:117] "RemoveContainer" containerID="de025035b116fd2fdb67de24fcac9a9460c3f5db41388245820cfcb5058ef966" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.320160 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321042 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="cinder-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321060 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="cinder-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321083 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: 
I1125 14:59:54.321090 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321101 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321108 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321143 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321151 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321167 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321174 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321196 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server-init" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321204 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server-init" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321212 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321220 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321236 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321243 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321256 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321263 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321275 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea2d8cdd-6c47-4cf7-b336-933762d2c445" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321283 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea2d8cdd-6c47-4cf7-b336-933762d2c445" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321297 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api" Nov 
25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321305 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321316 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="rsync" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321322 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="rsync" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321336 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="swift-recon-cron" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321343 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="swift-recon-cron" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321353 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-central-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321360 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-central-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321373 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5944a1b-2616-48bd-9695-32641324e1c2" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321381 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5944a1b-2616-48bd-9695-32641324e1c2" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321393 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321401 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321413 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321421 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321431 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321438 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321453 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321460 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321473 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" 
containerName="barbican-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321479 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321492 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321498 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321511 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321518 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321531 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321539 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321552 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="ovsdbserver-sb" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321559 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="ovsdbserver-sb" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321569 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321576 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321585 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321592 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321607 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321615 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321625 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerName="nova-scheduler-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321634 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerName="nova-scheduler-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321642 4879 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321650 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321665 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321672 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321681 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-expirer" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321689 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-expirer" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321698 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="rabbitmq" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321705 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="rabbitmq" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321715 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321723 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321738 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321745 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321755 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321762 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321774 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321783 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-server" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321792 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-reaper" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321799 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-reaper" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321815 
4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7b7503-cc0d-48fe-be8a-75f2362edebf" containerName="keystone-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321821 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7b7503-cc0d-48fe-be8a-75f2362edebf" containerName="keystone-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321833 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321840 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321852 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321860 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321873 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="sg-core" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321880 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="sg-core" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321894 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321901 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321917 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerName="nova-cell1-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321924 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerName="nova-cell1-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321934 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c000a8-9862-4518-87aa-d818a118973c" containerName="memcached" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321943 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c000a8-9862-4518-87aa-d818a118973c" containerName="memcached" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321958 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321966 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321974 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="probe" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321981 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="probe" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.321990 4879 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.321997 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322007 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322014 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-server" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322027 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322034 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-api" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322050 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322058 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322068 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322076 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-server" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322091 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322098 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322114 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322136 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322147 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322155 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-server" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322171 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322179 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322191 4879 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322197 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener-log" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322210 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="setup-container" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322217 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="setup-container" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322227 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322234 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322247 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322254 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322263 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca53b222-8018-4445-aa86-5401dbc847b5" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322271 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca53b222-8018-4445-aa86-5401dbc847b5" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322285 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322294 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322302 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322310 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322319 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="mysql-bootstrap" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322326 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="mysql-bootstrap" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322335 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="rabbitmq" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322341 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="rabbitmq" Nov 25 14:59:54 crc 
kubenswrapper[4879]: E1125 14:59:54.322350 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerName="kube-state-metrics" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322358 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerName="kube-state-metrics" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322368 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322374 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322382 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-notification-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322389 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-notification-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322404 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322411 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322424 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322431 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322441 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="mysql-bootstrap" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322449 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="mysql-bootstrap" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322457 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="setup-container" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322464 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="setup-container" Nov 25 14:59:54 crc kubenswrapper[4879]: E1125 14:59:54.322471 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322478 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322660 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovs-vswitchd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322677 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c1814b22-d1b3-4426-9fa2-f613640f63e8" containerName="rabbitmq" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322691 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322701 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc0bc72d-445e-4e4e-a9ee-a72b40f7eff0" containerName="kube-state-metrics" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322712 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322722 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="probe" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322733 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322746 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f98073f-daa8-4796-955e-2f7d767d9125" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322760 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322772 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322780 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="ovsdbserver-sb" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322795 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba4e29f9-f929-4f80-ad4d-f6f1ed7e77dd" containerName="rabbitmq" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322809 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322818 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322828 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca53b222-8018-4445-aa86-5401dbc847b5" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322852 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="swift-recon-cron" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322866 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322876 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cd36dfc-0f8d-4872-a7f8-f8635f3d9dd7" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322886 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a21ccabf-6dd0-4ea4-8dd3-4ecde47996dc" containerName="mariadb-account-delete" Nov 25 14:59:54 crc 
kubenswrapper[4879]: I1125 14:59:54.322899 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3273dc-a6fa-43b7-8225-7a175f55da77" containerName="nova-scheduler-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322910 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322922 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5944a1b-2616-48bd-9695-32641324e1c2" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322933 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322944 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a19706bc-9684-4f70-a0e8-9108014cac2f" containerName="galera" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322958 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322966 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322979 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.322993 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="73963218-ce5b-4813-8224-27ad7b69d0b3" containerName="placement-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323006 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4835e527-a539-4cc7-8730-d75f0c5af849" containerName="nova-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323019 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4542a25b-82fa-419d-a6bb-8a2f653d88a1" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323031 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd97ea2c-1bb3-40b0-b83a-af5b7c9c71eb" containerName="barbican-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323045 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323054 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323063 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee22f7f3-e4e8-4166-87bd-ca7833654410" containerName="nova-cell1-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323075 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f130d5-685b-4b37-89bb-b9536109c8fb" containerName="ovn-northd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323083 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-updater" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323091 4879 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="container-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323103 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f930a5-3344-4b7f-90d4-10a4b758e740" containerName="cinder-api-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323110 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcfac5ba-5544-4d76-af22-0c8b6b9028a7" containerName="nova-cell0-conductor-conductor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323137 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323148 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323156 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-replicator" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323167 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e4d89e1-a52e-4778-b0fb-d71662d7cc2b" containerName="barbican-worker-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323181 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98226c2-37b7-46b6-ba95-ad7fb26e2402" containerName="ovsdb-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323189 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-reaper" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323199 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b33fd48-a5ae-4916-93f5-0675f1cc8bca" containerName="barbican-keystone-listener" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323208 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a901e0fb-8403-4e8d-a1b1-b3ccae942552" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323222 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="account-server" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323232 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7b7503-cc0d-48fe-be8a-75f2362edebf" containerName="keystone-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323240 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323249 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea2d8cdd-6c47-4cf7-b336-933762d2c445" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323258 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e81331ae-5592-4d18-8116-ef1ef9520145" containerName="openstack-network-exporter" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323265 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="rsync" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323277 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-central-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323291 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="552e169f-1bf1-4d0b-802a-da9720c6a35d" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323304 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="proxy-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323315 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cc2c187-456f-439a-a4b2-33dda7946308" containerName="glance-log" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323323 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd903399-aa23-4f0d-93fc-4c7a5f454750" containerName="cinder-scheduler" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323334 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f9612e1-0c6a-42d0-bed3-1b2bf3ce6af8" containerName="mariadb-account-delete" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323344 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="sg-core" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323351 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6391ba8e-71b4-44d3-8a99-14ff66c61604" containerName="nova-metadata-metadata" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323361 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="36963cc9-ce9a-4f42-81ac-1a5afde50592" containerName="neutron-api" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323373 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-auditor" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323383 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c000a8-9862-4518-87aa-d818a118973c" containerName="memcached" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323395 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="734e5d8b-907c-4246-adca-6a05a98c0b27" containerName="object-expirer" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323406 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41b8b48-a0d1-4f8c-b8e8-96eee57a9743" containerName="glance-httpd" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.323413 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2241b679-a172-4455-8fed-c31014efe301" containerName="ceilometer-notification-agent" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.324748 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.342953 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.477707 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqwxh\" (UniqueName: \"kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.478109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.478244 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.579449 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqwxh\" (UniqueName: \"kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.579507 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.579590 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.580189 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.580620 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.601693 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bqwxh\" (UniqueName: \"kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh\") pod \"redhat-operators-z67k7\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.642934 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 14:59:54 crc kubenswrapper[4879]: I1125 14:59:54.914558 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 14:59:55 crc kubenswrapper[4879]: I1125 14:59:55.021402 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerStarted","Data":"c302c9e33bc45317f94518c3a5037064f35ab3d743a834b5de11459cb3b6d38f"} Nov 25 14:59:56 crc kubenswrapper[4879]: I1125 14:59:56.028917 4879 generic.go:334] "Generic (PLEG): container finished" podID="27da90be-2be8-4f28-b590-937948292a8e" containerID="5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633" exitCode=0 Nov 25 14:59:56 crc kubenswrapper[4879]: I1125 14:59:56.029013 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerDied","Data":"5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633"} Nov 25 14:59:56 crc kubenswrapper[4879]: I1125 14:59:56.030735 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 14:59:58 crc kubenswrapper[4879]: I1125 14:59:58.054866 4879 generic.go:334] "Generic (PLEG): container finished" podID="27da90be-2be8-4f28-b590-937948292a8e" containerID="3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd" exitCode=0 Nov 25 14:59:58 crc kubenswrapper[4879]: I1125 14:59:58.054921 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerDied","Data":"3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd"} Nov 25 14:59:59 crc kubenswrapper[4879]: I1125 14:59:59.066254 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerStarted","Data":"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4"} Nov 25 14:59:59 crc kubenswrapper[4879]: I1125 14:59:59.089166 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z67k7" podStartSLOduration=2.664123458 podStartE2EDuration="5.089144877s" podCreationTimestamp="2025-11-25 14:59:54 +0000 UTC" firstStartedPulling="2025-11-25 14:59:56.030504038 +0000 UTC m=+2087.633917109" lastFinishedPulling="2025-11-25 14:59:58.455525457 +0000 UTC m=+2090.058938528" observedRunningTime="2025-11-25 14:59:59.081777358 +0000 UTC m=+2090.685190439" watchObservedRunningTime="2025-11-25 14:59:59.089144877 +0000 UTC m=+2090.692557948" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.163339 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss"] Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.164442 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.166610 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.166801 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.188759 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss"] Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.260964 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmg5d\" (UniqueName: \"kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.261041 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.261084 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.363804 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmg5d\" (UniqueName: \"kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.363912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.363977 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.366112 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume\") pod 
\"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.374865 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.391351 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmg5d\" (UniqueName: \"kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d\") pod \"collect-profiles-29401380-gvtss\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.507962 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:00 crc kubenswrapper[4879]: I1125 15:00:00.950192 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss"] Nov 25 15:00:01 crc kubenswrapper[4879]: I1125 15:00:01.080413 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" event={"ID":"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52","Type":"ContainerStarted","Data":"ad196d6a672be17b1815865f2aa821abdaf97d0369198b70a987249a7dc217a2"} Nov 25 15:00:02 crc kubenswrapper[4879]: I1125 15:00:02.090429 4879 generic.go:334] "Generic (PLEG): container finished" podID="a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" containerID="bf29b7d952df2d9d2ec65f9fee0fb0b0d092d1d27e181b9f4059f176c47b8889" exitCode=0 Nov 25 15:00:02 crc kubenswrapper[4879]: I1125 15:00:02.090488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" event={"ID":"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52","Type":"ContainerDied","Data":"bf29b7d952df2d9d2ec65f9fee0fb0b0d092d1d27e181b9f4059f176c47b8889"} Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.384185 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.508462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume\") pod \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.508546 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume\") pod \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.508576 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmg5d\" (UniqueName: \"kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d\") pod \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\" (UID: \"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52\") " Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.509590 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume" (OuterVolumeSpecName: "config-volume") pod "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" (UID: "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.514464 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" (UID: "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.514482 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d" (OuterVolumeSpecName: "kube-api-access-qmg5d") pod "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" (UID: "a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52"). InnerVolumeSpecName "kube-api-access-qmg5d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.610842 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.611141 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:03 crc kubenswrapper[4879]: I1125 15:00:03.611153 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmg5d\" (UniqueName: \"kubernetes.io/projected/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52-kube-api-access-qmg5d\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.111788 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" event={"ID":"a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52","Type":"ContainerDied","Data":"ad196d6a672be17b1815865f2aa821abdaf97d0369198b70a987249a7dc217a2"} Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.111829 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad196d6a672be17b1815865f2aa821abdaf97d0369198b70a987249a7dc217a2" Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.111932 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss" Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.466286 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"] Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.473770 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401335-qf2x4"] Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.643424 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.643479 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:04 crc kubenswrapper[4879]: I1125 15:00:04.685461 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:05 crc kubenswrapper[4879]: I1125 15:00:05.174891 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:05 crc kubenswrapper[4879]: I1125 15:00:05.228026 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 15:00:05 crc kubenswrapper[4879]: I1125 15:00:05.655102 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92593502-485f-47ee-aba3-392741b2740a" path="/var/lib/kubelet/pods/92593502-485f-47ee-aba3-392741b2740a/volumes" Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.148780 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z67k7" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="registry-server" 
containerID="cri-o://0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4" gracePeriod=2 Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.513393 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.682390 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content\") pod \"27da90be-2be8-4f28-b590-937948292a8e\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.682815 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqwxh\" (UniqueName: \"kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh\") pod \"27da90be-2be8-4f28-b590-937948292a8e\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.683005 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities\") pod \"27da90be-2be8-4f28-b590-937948292a8e\" (UID: \"27da90be-2be8-4f28-b590-937948292a8e\") " Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.683871 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities" (OuterVolumeSpecName: "utilities") pod "27da90be-2be8-4f28-b590-937948292a8e" (UID: "27da90be-2be8-4f28-b590-937948292a8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.709389 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh" (OuterVolumeSpecName: "kube-api-access-bqwxh") pod "27da90be-2be8-4f28-b590-937948292a8e" (UID: "27da90be-2be8-4f28-b590-937948292a8e"). InnerVolumeSpecName "kube-api-access-bqwxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.784524 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:07 crc kubenswrapper[4879]: I1125 15:00:07.784566 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqwxh\" (UniqueName: \"kubernetes.io/projected/27da90be-2be8-4f28-b590-937948292a8e-kube-api-access-bqwxh\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.157854 4879 generic.go:334] "Generic (PLEG): container finished" podID="27da90be-2be8-4f28-b590-937948292a8e" containerID="0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4" exitCode=0 Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.157917 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z67k7" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.158856 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerDied","Data":"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4"} Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.158966 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z67k7" event={"ID":"27da90be-2be8-4f28-b590-937948292a8e","Type":"ContainerDied","Data":"c302c9e33bc45317f94518c3a5037064f35ab3d743a834b5de11459cb3b6d38f"} Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.159047 4879 scope.go:117] "RemoveContainer" containerID="0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.176700 4879 scope.go:117] "RemoveContainer" containerID="3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.195742 4879 scope.go:117] "RemoveContainer" containerID="5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.220406 4879 scope.go:117] "RemoveContainer" containerID="0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4" Nov 25 15:00:08 crc kubenswrapper[4879]: E1125 15:00:08.220802 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4\": container with ID starting with 0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4 not found: ID does not exist" containerID="0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.220837 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4"} err="failed to get container status \"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4\": rpc error: code = NotFound desc = could not find container \"0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4\": container with ID starting with 0be8b158235215e8ef2c21374dd4fd6ce3e2169961ffd9ebffffaa46744619f4 not found: ID does not exist" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.220861 4879 scope.go:117] "RemoveContainer" containerID="3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd" Nov 25 15:00:08 crc kubenswrapper[4879]: E1125 15:00:08.221061 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd\": container with ID starting with 3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd not found: ID does not exist" containerID="3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.221087 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd"} err="failed to get container status \"3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd\": rpc error: code = NotFound desc = could not find container 
\"3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd\": container with ID starting with 3ac187a54c18b173f11643139bb1d091775ec06d11a6a3644d9df2534213accd not found: ID does not exist" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.221100 4879 scope.go:117] "RemoveContainer" containerID="5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633" Nov 25 15:00:08 crc kubenswrapper[4879]: E1125 15:00:08.221440 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633\": container with ID starting with 5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633 not found: ID does not exist" containerID="5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633" Nov 25 15:00:08 crc kubenswrapper[4879]: I1125 15:00:08.221466 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633"} err="failed to get container status \"5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633\": rpc error: code = NotFound desc = could not find container \"5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633\": container with ID starting with 5e0c6a921b1bf29f70aa4a071a7bedb19ee7091f7f02163472db774036406633 not found: ID does not exist" Nov 25 15:00:09 crc kubenswrapper[4879]: I1125 15:00:09.218260 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27da90be-2be8-4f28-b590-937948292a8e" (UID: "27da90be-2be8-4f28-b590-937948292a8e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:00:09 crc kubenswrapper[4879]: I1125 15:00:09.304025 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27da90be-2be8-4f28-b590-937948292a8e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:00:09 crc kubenswrapper[4879]: I1125 15:00:09.386286 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 15:00:09 crc kubenswrapper[4879]: I1125 15:00:09.393208 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z67k7"] Nov 25 15:00:09 crc kubenswrapper[4879]: I1125 15:00:09.653603 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27da90be-2be8-4f28-b590-937948292a8e" path="/var/lib/kubelet/pods/27da90be-2be8-4f28-b590-937948292a8e/volumes" Nov 25 15:00:17 crc kubenswrapper[4879]: I1125 15:00:17.408675 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:00:17 crc kubenswrapper[4879]: I1125 15:00:17.409758 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.762003 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8qnz8"] Nov 25 15:00:39 crc kubenswrapper[4879]: E1125 15:00:39.762860 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="extract-content" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.762875 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="extract-content" Nov 25 15:00:39 crc kubenswrapper[4879]: E1125 15:00:39.762889 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" containerName="collect-profiles" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.762896 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" containerName="collect-profiles" Nov 25 15:00:39 crc kubenswrapper[4879]: E1125 15:00:39.762933 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="registry-server" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.762942 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="registry-server" Nov 25 15:00:39 crc kubenswrapper[4879]: E1125 15:00:39.762952 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="extract-utilities" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.762960 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="extract-utilities" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.763139 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" containerName="collect-profiles" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.763158 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="27da90be-2be8-4f28-b590-937948292a8e" containerName="registry-server" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.764078 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.788337 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"] Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.877290 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.877341 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6p2t\" (UniqueName: \"kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.877374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.978685 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.978741 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6p2t\" (UniqueName: \"kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.978786 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.979242 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.979276 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:39 crc kubenswrapper[4879]: I1125 15:00:39.999549 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6p2t\" (UniqueName: \"kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t\") pod \"community-operators-8qnz8\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") " pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.085160 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.398653 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"] Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.403393 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerStarted","Data":"aacdfca232f823afa9b76bb36b530b2bebeee582fa066f3dca4bb808a845c373"} Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.796993 4879 scope.go:117] "RemoveContainer" containerID="d0302612476ccf74c6889b66b2329d309c9a5c72dd0400e67fa30365f9b4ac12" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.817342 4879 scope.go:117] "RemoveContainer" containerID="8a0dd2eaef1407f0c4a4c8222672807747463c45e6a7033fe52d780b921e0356" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.839876 4879 scope.go:117] "RemoveContainer" containerID="82900f612c16251c224cea50e5e158d9454b14945eaa777b709af96a81ee1880" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.858265 4879 scope.go:117] "RemoveContainer" containerID="55880395db69d8e89d332ff4acd427f5ad61a8c22f2c6fc5803cf83bd5ee049c" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.877065 4879 scope.go:117] "RemoveContainer" containerID="e30b889f7f7242b2bc6830e7603d16e46068abbe4accb0fd07e1aecec86ed667" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.897245 4879 scope.go:117] "RemoveContainer" containerID="672b5efbcdf268d653fbcef5135f5823cee5ae4a86ddaa91ff9d8056bcb00fc4" Nov 25 15:00:40 crc kubenswrapper[4879]: I1125 15:00:40.915093 4879 scope.go:117] "RemoveContainer" containerID="36424a820326e2bcb9b581c795d4ca353c4c72c54677e8a6f4d43be34d1f7512" Nov 25 15:00:41 crc kubenswrapper[4879]: I1125 15:00:41.411942 4879 generic.go:334] "Generic (PLEG): container finished" podID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerID="75446bd566643b5ebb79edd05f837ef0cc22bbcc0cfcfd6b20b7441745dbd1ac" exitCode=0 Nov 25 15:00:41 crc kubenswrapper[4879]: I1125 15:00:41.412177 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerDied","Data":"75446bd566643b5ebb79edd05f837ef0cc22bbcc0cfcfd6b20b7441745dbd1ac"} Nov 25 15:00:46 crc kubenswrapper[4879]: I1125 15:00:46.453059 4879 generic.go:334] "Generic (PLEG): container finished" podID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerID="da1c33d559705dd479f9aa597118455fe50ad4af7be9130c43b407c5bb284447" exitCode=0 Nov 25 15:00:46 crc kubenswrapper[4879]: I1125 15:00:46.453143 4879 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerDied","Data":"da1c33d559705dd479f9aa597118455fe50ad4af7be9130c43b407c5bb284447"} Nov 25 15:00:47 crc kubenswrapper[4879]: I1125 15:00:47.409100 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:00:47 crc kubenswrapper[4879]: I1125 15:00:47.409397 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:00:47 crc kubenswrapper[4879]: I1125 15:00:47.463114 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerStarted","Data":"4c4409af6b4a5b1d4273f0d040964206a0f47d41c78e92d9f87aad7859b0b3a6"} Nov 25 15:00:47 crc kubenswrapper[4879]: I1125 15:00:47.484303 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8qnz8" podStartSLOduration=3.042253194 podStartE2EDuration="8.484281457s" podCreationTimestamp="2025-11-25 15:00:39 +0000 UTC" firstStartedPulling="2025-11-25 15:00:41.413901676 +0000 UTC m=+2133.017314747" lastFinishedPulling="2025-11-25 15:00:46.855929939 +0000 UTC m=+2138.459343010" observedRunningTime="2025-11-25 15:00:47.478645355 +0000 UTC m=+2139.082058436" watchObservedRunningTime="2025-11-25 15:00:47.484281457 +0000 UTC m=+2139.087694528" Nov 25 15:00:50 crc kubenswrapper[4879]: I1125 15:00:50.086430 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:50 crc kubenswrapper[4879]: I1125 15:00:50.086752 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:00:50 crc kubenswrapper[4879]: I1125 15:00:50.128682 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.128804 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8qnz8" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.197826 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"] Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.237088 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.237414 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dgc57" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="registry-server" containerID="cri-o://23a4ee28429b9389e47a7ad3e90d47efd3255ea4a9ab88962b18d9c588c9f31a" gracePeriod=2 Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.567484 4879 generic.go:334] "Generic (PLEG): container 
finished" podID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerID="23a4ee28429b9389e47a7ad3e90d47efd3255ea4a9ab88962b18d9c588c9f31a" exitCode=0 Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.567806 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerDied","Data":"23a4ee28429b9389e47a7ad3e90d47efd3255ea4a9ab88962b18d9c588c9f31a"} Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.704146 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dgc57" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.792561 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb446\" (UniqueName: \"kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446\") pod \"de9b900d-139d-4e55-b2e9-48ca61ee770c\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.792629 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content\") pod \"de9b900d-139d-4e55-b2e9-48ca61ee770c\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.792686 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities\") pod \"de9b900d-139d-4e55-b2e9-48ca61ee770c\" (UID: \"de9b900d-139d-4e55-b2e9-48ca61ee770c\") " Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.793368 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities" (OuterVolumeSpecName: "utilities") pod "de9b900d-139d-4e55-b2e9-48ca61ee770c" (UID: "de9b900d-139d-4e55-b2e9-48ca61ee770c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.798380 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446" (OuterVolumeSpecName: "kube-api-access-vb446") pod "de9b900d-139d-4e55-b2e9-48ca61ee770c" (UID: "de9b900d-139d-4e55-b2e9-48ca61ee770c"). InnerVolumeSpecName "kube-api-access-vb446". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.861511 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de9b900d-139d-4e55-b2e9-48ca61ee770c" (UID: "de9b900d-139d-4e55-b2e9-48ca61ee770c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.894724 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.894768 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb446\" (UniqueName: \"kubernetes.io/projected/de9b900d-139d-4e55-b2e9-48ca61ee770c-kube-api-access-vb446\") on node \"crc\" DevicePath \"\"" Nov 25 15:01:00 crc kubenswrapper[4879]: I1125 15:01:00.894784 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de9b900d-139d-4e55-b2e9-48ca61ee770c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.576577 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dgc57" event={"ID":"de9b900d-139d-4e55-b2e9-48ca61ee770c","Type":"ContainerDied","Data":"a32f6e41680b0026d5261ef5d91016fb700b0133b3bbbbdc034d432c4f2c758a"} Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.576635 4879 scope.go:117] "RemoveContainer" containerID="23a4ee28429b9389e47a7ad3e90d47efd3255ea4a9ab88962b18d9c588c9f31a" Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.576806 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dgc57" Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.611133 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.612445 4879 scope.go:117] "RemoveContainer" containerID="095d5d322f205bfe1ff4631ce6a377f7f0e2457fe20c08611104880d04141e31" Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.616704 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dgc57"] Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.634078 4879 scope.go:117] "RemoveContainer" containerID="64d19692f86123741dd1409a6710518668aa34340db16b79795dc427980caafd" Nov 25 15:01:01 crc kubenswrapper[4879]: I1125 15:01:01.654514 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" path="/var/lib/kubelet/pods/de9b900d-139d-4e55-b2e9-48ca61ee770c/volumes" Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.408971 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.409488 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.409535 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.410105 4879 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.410177 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d" gracePeriod=600 Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.699965 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d" exitCode=0 Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.700004 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d"} Nov 25 15:01:17 crc kubenswrapper[4879]: I1125 15:01:17.700282 4879 scope.go:117] "RemoveContainer" containerID="bd7ac0362a2a7f2b4e6bb032492a3027aedd8d9aad00955c44a0b21136ba8bc5" Nov 25 15:01:18 crc kubenswrapper[4879]: I1125 15:01:18.714497 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9"} Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.809470 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:02:51 crc kubenswrapper[4879]: E1125 15:02:51.813595 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="extract-content" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.813624 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="extract-content" Nov 25 15:02:51 crc kubenswrapper[4879]: E1125 15:02:51.813638 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="registry-server" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.813645 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="registry-server" Nov 25 15:02:51 crc kubenswrapper[4879]: E1125 15:02:51.813663 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="extract-utilities" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.813671 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="extract-utilities" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.813869 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="de9b900d-139d-4e55-b2e9-48ca61ee770c" containerName="registry-server" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.815335 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.821682 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.922793 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89dq8\" (UniqueName: \"kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.923238 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:51 crc kubenswrapper[4879]: I1125 15:02:51.923350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.024657 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89dq8\" (UniqueName: \"kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.025069 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.025211 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.025724 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.025789 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.047403 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-89dq8\" (UniqueName: \"kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8\") pod \"redhat-marketplace-ws86d\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.142353 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:02:52 crc kubenswrapper[4879]: I1125 15:02:52.557794 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:02:53 crc kubenswrapper[4879]: I1125 15:02:53.417999 4879 generic.go:334] "Generic (PLEG): container finished" podID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerID="a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340" exitCode=0 Nov 25 15:02:53 crc kubenswrapper[4879]: I1125 15:02:53.418103 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerDied","Data":"a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340"} Nov 25 15:02:53 crc kubenswrapper[4879]: I1125 15:02:53.418333 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerStarted","Data":"841fe376d4aec824dff3d3f3ba85b9c9f8684b8b5595daacfe477f743a14c2d7"} Nov 25 15:02:54 crc kubenswrapper[4879]: I1125 15:02:54.427399 4879 generic.go:334] "Generic (PLEG): container finished" podID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerID="c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4" exitCode=0 Nov 25 15:02:54 crc kubenswrapper[4879]: I1125 15:02:54.427538 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerDied","Data":"c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4"} Nov 25 15:02:55 crc kubenswrapper[4879]: I1125 15:02:55.437006 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerStarted","Data":"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780"} Nov 25 15:02:55 crc kubenswrapper[4879]: I1125 15:02:55.457865 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ws86d" podStartSLOduration=2.957062281 podStartE2EDuration="4.457845714s" podCreationTimestamp="2025-11-25 15:02:51 +0000 UTC" firstStartedPulling="2025-11-25 15:02:53.419358507 +0000 UTC m=+2265.022771578" lastFinishedPulling="2025-11-25 15:02:54.92014194 +0000 UTC m=+2266.523555011" observedRunningTime="2025-11-25 15:02:55.456799865 +0000 UTC m=+2267.060212946" watchObservedRunningTime="2025-11-25 15:02:55.457845714 +0000 UTC m=+2267.061258785" Nov 25 15:03:02 crc kubenswrapper[4879]: I1125 15:03:02.142516 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:02 crc kubenswrapper[4879]: I1125 15:03:02.143311 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:02 crc kubenswrapper[4879]: I1125 15:03:02.186800 4879 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:02 crc kubenswrapper[4879]: I1125 15:03:02.543500 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:02 crc kubenswrapper[4879]: I1125 15:03:02.591214 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:03:04 crc kubenswrapper[4879]: I1125 15:03:04.514572 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ws86d" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="registry-server" containerID="cri-o://e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780" gracePeriod=2 Nov 25 15:03:04 crc kubenswrapper[4879]: I1125 15:03:04.926009 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.016928 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content\") pod \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.017089 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89dq8\" (UniqueName: \"kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8\") pod \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.017199 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities\") pod \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\" (UID: \"45c932d8-66a5-4eb1-baa0-b99ccd21448e\") " Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.018183 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities" (OuterVolumeSpecName: "utilities") pod "45c932d8-66a5-4eb1-baa0-b99ccd21448e" (UID: "45c932d8-66a5-4eb1-baa0-b99ccd21448e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.023976 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8" (OuterVolumeSpecName: "kube-api-access-89dq8") pod "45c932d8-66a5-4eb1-baa0-b99ccd21448e" (UID: "45c932d8-66a5-4eb1-baa0-b99ccd21448e"). InnerVolumeSpecName "kube-api-access-89dq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.036958 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "45c932d8-66a5-4eb1-baa0-b99ccd21448e" (UID: "45c932d8-66a5-4eb1-baa0-b99ccd21448e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.118932 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89dq8\" (UniqueName: \"kubernetes.io/projected/45c932d8-66a5-4eb1-baa0-b99ccd21448e-kube-api-access-89dq8\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.119309 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.119321 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/45c932d8-66a5-4eb1-baa0-b99ccd21448e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.527467 4879 generic.go:334] "Generic (PLEG): container finished" podID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerID="e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780" exitCode=0 Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.527550 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerDied","Data":"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780"} Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.527597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ws86d" event={"ID":"45c932d8-66a5-4eb1-baa0-b99ccd21448e","Type":"ContainerDied","Data":"841fe376d4aec824dff3d3f3ba85b9c9f8684b8b5595daacfe477f743a14c2d7"} Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.527623 4879 scope.go:117] "RemoveContainer" containerID="e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.527735 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ws86d" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.551806 4879 scope.go:117] "RemoveContainer" containerID="c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.567688 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.574631 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ws86d"] Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.595435 4879 scope.go:117] "RemoveContainer" containerID="a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.615327 4879 scope.go:117] "RemoveContainer" containerID="e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780" Nov 25 15:03:05 crc kubenswrapper[4879]: E1125 15:03:05.615768 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780\": container with ID starting with e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780 not found: ID does not exist" containerID="e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.615826 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780"} err="failed to get container status \"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780\": rpc error: code = NotFound desc = could not find container \"e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780\": container with ID starting with e0369d296c47da9f6997e5f4ee7210c56bf62a3760361d1a72951d2c8d3cd780 not found: ID does not exist" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.615864 4879 scope.go:117] "RemoveContainer" containerID="c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4" Nov 25 15:03:05 crc kubenswrapper[4879]: E1125 15:03:05.616365 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4\": container with ID starting with c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4 not found: ID does not exist" containerID="c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.616397 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4"} err="failed to get container status \"c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4\": rpc error: code = NotFound desc = could not find container \"c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4\": container with ID starting with c5ba3b7c67e4bdd36aae3db9746fb2cbac4879f57c3dbc80d6a1e196ac3dbbc4 not found: ID does not exist" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.616418 4879 scope.go:117] "RemoveContainer" containerID="a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340" Nov 25 15:03:05 crc kubenswrapper[4879]: E1125 15:03:05.616733 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340\": container with ID starting with a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340 not found: ID does not exist" containerID="a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.616772 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340"} err="failed to get container status \"a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340\": rpc error: code = NotFound desc = could not find container \"a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340\": container with ID starting with a41168f7b4832a4f5660389f4e8f7fcf3ac71ce4f49afd674f6748900ed91340 not found: ID does not exist" Nov 25 15:03:05 crc kubenswrapper[4879]: I1125 15:03:05.661322 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" path="/var/lib/kubelet/pods/45c932d8-66a5-4eb1-baa0-b99ccd21448e/volumes" Nov 25 15:03:17 crc kubenswrapper[4879]: I1125 15:03:17.408717 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:03:17 crc kubenswrapper[4879]: I1125 15:03:17.409325 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.626255 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:20 crc kubenswrapper[4879]: E1125 15:03:20.626940 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="extract-utilities" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.626957 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="extract-utilities" Nov 25 15:03:20 crc kubenswrapper[4879]: E1125 15:03:20.626984 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="registry-server" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.626992 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="registry-server" Nov 25 15:03:20 crc kubenswrapper[4879]: E1125 15:03:20.627006 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="extract-content" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.627014 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="extract-content" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.627221 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="45c932d8-66a5-4eb1-baa0-b99ccd21448e" containerName="registry-server" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 
15:03:20.628557 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.643168 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.747399 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6qdk\" (UniqueName: \"kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.747563 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.747673 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.848997 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.849100 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.849153 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6qdk\" (UniqueName: \"kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.849717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.849884 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc 
kubenswrapper[4879]: I1125 15:03:20.868029 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6qdk\" (UniqueName: \"kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk\") pod \"certified-operators-mpvtn\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:20 crc kubenswrapper[4879]: I1125 15:03:20.951953 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:21 crc kubenswrapper[4879]: I1125 15:03:21.460508 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:21 crc kubenswrapper[4879]: I1125 15:03:21.653979 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerStarted","Data":"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8"} Nov 25 15:03:21 crc kubenswrapper[4879]: I1125 15:03:21.654032 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerStarted","Data":"a94ffe2938de790c49af939233fe1f7b8cbc28fe7c652deed6fc9c29d66fa83c"} Nov 25 15:03:22 crc kubenswrapper[4879]: I1125 15:03:22.657790 4879 generic.go:334] "Generic (PLEG): container finished" podID="2360b036-e793-4b96-bd4a-71d28f460360" containerID="99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8" exitCode=0 Nov 25 15:03:22 crc kubenswrapper[4879]: I1125 15:03:22.658087 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerDied","Data":"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8"} Nov 25 15:03:23 crc kubenswrapper[4879]: I1125 15:03:23.670670 4879 generic.go:334] "Generic (PLEG): container finished" podID="2360b036-e793-4b96-bd4a-71d28f460360" containerID="c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41" exitCode=0 Nov 25 15:03:23 crc kubenswrapper[4879]: I1125 15:03:23.670723 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerDied","Data":"c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41"} Nov 25 15:03:24 crc kubenswrapper[4879]: I1125 15:03:24.680214 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerStarted","Data":"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1"} Nov 25 15:03:24 crc kubenswrapper[4879]: I1125 15:03:24.701942 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mpvtn" podStartSLOduration=3.015697888 podStartE2EDuration="4.70192013s" podCreationTimestamp="2025-11-25 15:03:20 +0000 UTC" firstStartedPulling="2025-11-25 15:03:22.663497396 +0000 UTC m=+2294.266910467" lastFinishedPulling="2025-11-25 15:03:24.349719638 +0000 UTC m=+2295.953132709" observedRunningTime="2025-11-25 15:03:24.698660662 +0000 UTC m=+2296.302073753" watchObservedRunningTime="2025-11-25 15:03:24.70192013 +0000 UTC m=+2296.305333201" Nov 25 15:03:30 crc kubenswrapper[4879]: I1125 
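
[editor's note] The pod_startup_latency_tracker entry above encodes a small calculation: podStartSLOduration (3.015697888s) equals podStartE2EDuration (4.70192013s) minus the image-pull window (15:03:22.663497396 to 15:03:24.349719638, i.e. 1.686222242s), so pull time is not charged against the startup SLO. A check of that arithmetic, assuming this reading of the logged fields:

```go
// startup_slo.go - reproduces the arithmetic in the tracker entry above:
// SLO duration = end-to-end startup time - image pull window.
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		// Layout matches Go's default time.Time formatting used in the log.
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	firstPull := parse("2025-11-25 15:03:22.663497396 +0000 UTC")
	lastPull := parse("2025-11-25 15:03:24.349719638 +0000 UTC")
	e2e := 4.70192013 // podStartE2EDuration, seconds

	slo := e2e - lastPull.Sub(firstPull).Seconds()
	fmt.Printf("podStartSLOduration = %.9f s\n", slo) // 3.015697888, matching the log
}
```
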
15:03:30.952552 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:30 crc kubenswrapper[4879]: I1125 15:03:30.963881 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:31 crc kubenswrapper[4879]: I1125 15:03:31.054190 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:31 crc kubenswrapper[4879]: I1125 15:03:31.783052 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:32 crc kubenswrapper[4879]: I1125 15:03:32.017904 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:33 crc kubenswrapper[4879]: I1125 15:03:33.765679 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mpvtn" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="registry-server" containerID="cri-o://8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1" gracePeriod=2 Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.216697 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.274810 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6qdk\" (UniqueName: \"kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk\") pod \"2360b036-e793-4b96-bd4a-71d28f460360\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.275462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content\") pod \"2360b036-e793-4b96-bd4a-71d28f460360\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.275511 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities\") pod \"2360b036-e793-4b96-bd4a-71d28f460360\" (UID: \"2360b036-e793-4b96-bd4a-71d28f460360\") " Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.276860 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities" (OuterVolumeSpecName: "utilities") pod "2360b036-e793-4b96-bd4a-71d28f460360" (UID: "2360b036-e793-4b96-bd4a-71d28f460360"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.284030 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk" (OuterVolumeSpecName: "kube-api-access-k6qdk") pod "2360b036-e793-4b96-bd4a-71d28f460360" (UID: "2360b036-e793-4b96-bd4a-71d28f460360"). InnerVolumeSpecName "kube-api-access-k6qdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.377808 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6qdk\" (UniqueName: \"kubernetes.io/projected/2360b036-e793-4b96-bd4a-71d28f460360-kube-api-access-k6qdk\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.377863 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.507527 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2360b036-e793-4b96-bd4a-71d28f460360" (UID: "2360b036-e793-4b96-bd4a-71d28f460360"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.579920 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2360b036-e793-4b96-bd4a-71d28f460360-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.775232 4879 generic.go:334] "Generic (PLEG): container finished" podID="2360b036-e793-4b96-bd4a-71d28f460360" containerID="8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1" exitCode=0 Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.775282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerDied","Data":"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1"} Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.775312 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpvtn" event={"ID":"2360b036-e793-4b96-bd4a-71d28f460360","Type":"ContainerDied","Data":"a94ffe2938de790c49af939233fe1f7b8cbc28fe7c652deed6fc9c29d66fa83c"} Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.775329 4879 scope.go:117] "RemoveContainer" containerID="8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.775479 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mpvtn" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.804037 4879 scope.go:117] "RemoveContainer" containerID="c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.813052 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.821624 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mpvtn"] Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.835297 4879 scope.go:117] "RemoveContainer" containerID="99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.853624 4879 scope.go:117] "RemoveContainer" containerID="8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1" Nov 25 15:03:34 crc kubenswrapper[4879]: E1125 15:03:34.854192 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1\": container with ID starting with 8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1 not found: ID does not exist" containerID="8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.854232 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1"} err="failed to get container status \"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1\": rpc error: code = NotFound desc = could not find container \"8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1\": container with ID starting with 8753f7dc3cb3a640b4a5d2f592553e906669eb816ccc1f42f62306109e3a2af1 not found: ID does not exist" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.854261 4879 scope.go:117] "RemoveContainer" containerID="c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41" Nov 25 15:03:34 crc kubenswrapper[4879]: E1125 15:03:34.854903 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41\": container with ID starting with c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41 not found: ID does not exist" containerID="c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.854927 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41"} err="failed to get container status \"c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41\": rpc error: code = NotFound desc = could not find container \"c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41\": container with ID starting with c24b032fbdfa9498c6fde89906aca0e2cd740db35b033c3bb244caf598bd7b41 not found: ID does not exist" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.854992 4879 scope.go:117] "RemoveContainer" containerID="99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8" Nov 25 15:03:34 crc kubenswrapper[4879]: E1125 15:03:34.855342 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8\": container with ID starting with 99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8 not found: ID does not exist" containerID="99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8" Nov 25 15:03:34 crc kubenswrapper[4879]: I1125 15:03:34.855379 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8"} err="failed to get container status \"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8\": rpc error: code = NotFound desc = could not find container \"99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8\": container with ID starting with 99cd947b42633ca288f58a38505acc24cc24db90d25f48a0ea90314d231a05c8 not found: ID does not exist" Nov 25 15:03:35 crc kubenswrapper[4879]: I1125 15:03:35.652972 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2360b036-e793-4b96-bd4a-71d28f460360" path="/var/lib/kubelet/pods/2360b036-e793-4b96-bd4a-71d28f460360/volumes" Nov 25 15:03:47 crc kubenswrapper[4879]: I1125 15:03:47.408513 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:03:47 crc kubenswrapper[4879]: I1125 15:03:47.409069 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:04:17 crc kubenswrapper[4879]: I1125 15:04:17.408788 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:04:17 crc kubenswrapper[4879]: I1125 15:04:17.409438 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:04:17 crc kubenswrapper[4879]: I1125 15:04:17.409485 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:04:17 crc kubenswrapper[4879]: I1125 15:04:17.411581 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:04:17 crc kubenswrapper[4879]: I1125 15:04:17.411696 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" gracePeriod=600 Nov 25 15:04:17 crc kubenswrapper[4879]: E1125 15:04:17.532879 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:04:18 crc kubenswrapper[4879]: I1125 15:04:18.121588 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" exitCode=0 Nov 25 15:04:18 crc kubenswrapper[4879]: I1125 15:04:18.121632 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9"} Nov 25 15:04:18 crc kubenswrapper[4879]: I1125 15:04:18.121663 4879 scope.go:117] "RemoveContainer" containerID="bac139da3495699b039e7361e407b63eb9bf2bfecd2cb38712afa802bae51e2d" Nov 25 15:04:18 crc kubenswrapper[4879]: I1125 15:04:18.122522 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:04:18 crc kubenswrapper[4879]: E1125 15:04:18.122880 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:04:29 crc kubenswrapper[4879]: I1125 15:04:29.650653 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:04:29 crc kubenswrapper[4879]: E1125 15:04:29.651436 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:04:41 crc kubenswrapper[4879]: I1125 15:04:41.645664 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:04:41 crc kubenswrapper[4879]: E1125 15:04:41.646441 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:04:55 crc kubenswrapper[4879]: I1125 15:04:55.645114 4879 scope.go:117] 
"RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:04:55 crc kubenswrapper[4879]: E1125 15:04:55.645919 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:05:07 crc kubenswrapper[4879]: I1125 15:05:07.648470 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:05:07 crc kubenswrapper[4879]: E1125 15:05:07.649142 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:05:22 crc kubenswrapper[4879]: I1125 15:05:22.644858 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:05:22 crc kubenswrapper[4879]: E1125 15:05:22.645560 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:05:33 crc kubenswrapper[4879]: I1125 15:05:33.644818 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:05:33 crc kubenswrapper[4879]: E1125 15:05:33.645431 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:05:45 crc kubenswrapper[4879]: I1125 15:05:45.644242 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:05:45 crc kubenswrapper[4879]: E1125 15:05:45.644984 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:06:00 crc kubenswrapper[4879]: I1125 15:06:00.644878 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:06:00 crc kubenswrapper[4879]: E1125 15:06:00.645646 4879 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:06:12 crc kubenswrapper[4879]: I1125 15:06:12.646079 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:06:12 crc kubenswrapper[4879]: E1125 15:06:12.647426 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:06:25 crc kubenswrapper[4879]: I1125 15:06:25.645477 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:06:25 crc kubenswrapper[4879]: E1125 15:06:25.648843 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:06:40 crc kubenswrapper[4879]: I1125 15:06:40.644838 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:06:40 crc kubenswrapper[4879]: E1125 15:06:40.645592 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:06:54 crc kubenswrapper[4879]: I1125 15:06:54.644662 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:06:54 crc kubenswrapper[4879]: E1125 15:06:54.645404 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:07:07 crc kubenswrapper[4879]: I1125 15:07:07.644570 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:07:07 crc kubenswrapper[4879]: E1125 15:07:07.645421 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:07:19 crc kubenswrapper[4879]: I1125 15:07:19.650055 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:07:19 crc kubenswrapper[4879]: E1125 15:07:19.650918 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:07:30 crc kubenswrapper[4879]: I1125 15:07:30.645320 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:07:30 crc kubenswrapper[4879]: E1125 15:07:30.646047 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:07:45 crc kubenswrapper[4879]: I1125 15:07:45.644465 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:07:45 crc kubenswrapper[4879]: E1125 15:07:45.645452 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:07:57 crc kubenswrapper[4879]: I1125 15:07:57.645036 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:07:57 crc kubenswrapper[4879]: E1125 15:07:57.646224 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:08:11 crc kubenswrapper[4879]: I1125 15:08:11.644642 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:08:11 crc kubenswrapper[4879]: E1125 15:08:11.645424 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
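
[editor's note] While the back-off is in effect, every sync attempt fails immediately with the same CrashLoopBackOff error; only after the window expires (15:09:17 below) is the container started again. A sketch of the doubling back-off, assuming the kubelet's usual 10s initial delay and 5m cap; only the 5m cap is actually visible in this log:

```go
// crashloop_backoff.go - the restart delay doubles per crash and saturates
// at a maximum, which is why the log shows a constant "back-off 5m0s".
package main

import (
	"fmt"
	"time"
)

func backoff(restarts int) time.Duration {
	const (
		initial  = 10 * time.Second // assumed kubelet default
		maxDelay = 5 * time.Minute  // the cap seen in the log
	)
	d := initial
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
	}
	// From restart 5 onward this prints 5m0s, matching the repeated
	// "back-off 5m0s" errors until the window expires.
}
```
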
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:08:25 crc kubenswrapper[4879]: I1125 15:08:25.644915 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:08:25 crc kubenswrapper[4879]: E1125 15:08:25.645818 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:08:36 crc kubenswrapper[4879]: I1125 15:08:36.645056 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:08:36 crc kubenswrapper[4879]: E1125 15:08:36.645586 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:08:49 crc kubenswrapper[4879]: I1125 15:08:49.649668 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:08:49 crc kubenswrapper[4879]: E1125 15:08:49.650565 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:09:02 crc kubenswrapper[4879]: I1125 15:09:02.645011 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:09:02 crc kubenswrapper[4879]: E1125 15:09:02.645652 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:09:17 crc kubenswrapper[4879]: I1125 15:09:17.644501 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:09:18 crc kubenswrapper[4879]: I1125 15:09:18.471999 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4"} Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.465975 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-42t5z"] Nov 25 15:10:35 crc kubenswrapper[4879]: E1125 15:10:35.467141 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="extract-utilities" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.467166 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="extract-utilities" Nov 25 15:10:35 crc kubenswrapper[4879]: E1125 15:10:35.467185 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="extract-content" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.467193 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="extract-content" Nov 25 15:10:35 crc kubenswrapper[4879]: E1125 15:10:35.467214 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="registry-server" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.467223 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="registry-server" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.467418 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2360b036-e793-4b96-bd4a-71d28f460360" containerName="registry-server" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.468970 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.487151 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-42t5z"] Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.620323 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scx94\" (UniqueName: \"kubernetes.io/projected/4889c4d8-84da-4397-9b01-fa5f5695887b-kube-api-access-scx94\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.620453 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-catalog-content\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.620500 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-utilities\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.721560 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scx94\" (UniqueName: \"kubernetes.io/projected/4889c4d8-84da-4397-9b01-fa5f5695887b-kube-api-access-scx94\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.721666 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-catalog-content\") pod \"redhat-operators-42t5z\" 
(UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.721697 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-utilities\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.722187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-utilities\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.722264 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4889c4d8-84da-4397-9b01-fa5f5695887b-catalog-content\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.754904 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scx94\" (UniqueName: \"kubernetes.io/projected/4889c4d8-84da-4397-9b01-fa5f5695887b-kube-api-access-scx94\") pod \"redhat-operators-42t5z\" (UID: \"4889c4d8-84da-4397-9b01-fa5f5695887b\") " pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:35 crc kubenswrapper[4879]: I1125 15:10:35.788920 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:36 crc kubenswrapper[4879]: I1125 15:10:36.207686 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-42t5z"] Nov 25 15:10:37 crc kubenswrapper[4879]: I1125 15:10:37.058902 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42t5z" event={"ID":"4889c4d8-84da-4397-9b01-fa5f5695887b","Type":"ContainerStarted","Data":"2714522fc6ddf864b7916622e790e6422d8da3c25e09c3d9b257fd098d240a5e"} Nov 25 15:10:38 crc kubenswrapper[4879]: I1125 15:10:38.067260 4879 generic.go:334] "Generic (PLEG): container finished" podID="4889c4d8-84da-4397-9b01-fa5f5695887b" containerID="35da8753961ac071f831d5ec86f7884e14bd81d9f725b90cd99bb00d93cad4c5" exitCode=0 Nov 25 15:10:38 crc kubenswrapper[4879]: I1125 15:10:38.067333 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42t5z" event={"ID":"4889c4d8-84da-4397-9b01-fa5f5695887b","Type":"ContainerDied","Data":"35da8753961ac071f831d5ec86f7884e14bd81d9f725b90cd99bb00d93cad4c5"} Nov 25 15:10:38 crc kubenswrapper[4879]: I1125 15:10:38.080008 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:10:46 crc kubenswrapper[4879]: I1125 15:10:46.136657 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42t5z" event={"ID":"4889c4d8-84da-4397-9b01-fa5f5695887b","Type":"ContainerStarted","Data":"173d4a0af57a01b183e1463d5073aee68a87b86d7709224da77acc3df5dda159"} Nov 25 15:10:47 crc kubenswrapper[4879]: I1125 15:10:47.147502 4879 generic.go:334] "Generic (PLEG): container finished" podID="4889c4d8-84da-4397-9b01-fa5f5695887b" 
containerID="173d4a0af57a01b183e1463d5073aee68a87b86d7709224da77acc3df5dda159" exitCode=0 Nov 25 15:10:47 crc kubenswrapper[4879]: I1125 15:10:47.147549 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42t5z" event={"ID":"4889c4d8-84da-4397-9b01-fa5f5695887b","Type":"ContainerDied","Data":"173d4a0af57a01b183e1463d5073aee68a87b86d7709224da77acc3df5dda159"} Nov 25 15:10:48 crc kubenswrapper[4879]: I1125 15:10:48.164750 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-42t5z" event={"ID":"4889c4d8-84da-4397-9b01-fa5f5695887b","Type":"ContainerStarted","Data":"86aa0351fdfc84469670a81d32906fc1c0c9428372c59c9f2c07cfad893bbc73"} Nov 25 15:10:48 crc kubenswrapper[4879]: I1125 15:10:48.195751 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-42t5z" podStartSLOduration=3.42237352 podStartE2EDuration="13.195731995s" podCreationTimestamp="2025-11-25 15:10:35 +0000 UTC" firstStartedPulling="2025-11-25 15:10:38.079642796 +0000 UTC m=+2729.683055867" lastFinishedPulling="2025-11-25 15:10:47.853001271 +0000 UTC m=+2739.456414342" observedRunningTime="2025-11-25 15:10:48.183038801 +0000 UTC m=+2739.786451872" watchObservedRunningTime="2025-11-25 15:10:48.195731995 +0000 UTC m=+2739.799145056" Nov 25 15:10:55 crc kubenswrapper[4879]: I1125 15:10:55.789348 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:55 crc kubenswrapper[4879]: I1125 15:10:55.789841 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:55 crc kubenswrapper[4879]: I1125 15:10:55.829222 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.263239 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-42t5z" Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.319732 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-42t5z"] Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.366754 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.367167 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fdl65" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="registry-server" containerID="cri-o://9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0" gracePeriod=2 Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.786011 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.919185 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content\") pod \"a5e87bea-0848-477e-b207-0aaac4e1e63a\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.919314 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities\") pod \"a5e87bea-0848-477e-b207-0aaac4e1e63a\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.919378 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj8pp\" (UniqueName: \"kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp\") pod \"a5e87bea-0848-477e-b207-0aaac4e1e63a\" (UID: \"a5e87bea-0848-477e-b207-0aaac4e1e63a\") " Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.920093 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities" (OuterVolumeSpecName: "utilities") pod "a5e87bea-0848-477e-b207-0aaac4e1e63a" (UID: "a5e87bea-0848-477e-b207-0aaac4e1e63a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.942653 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp" (OuterVolumeSpecName: "kube-api-access-xj8pp") pod "a5e87bea-0848-477e-b207-0aaac4e1e63a" (UID: "a5e87bea-0848-477e-b207-0aaac4e1e63a"). InnerVolumeSpecName "kube-api-access-xj8pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:10:56 crc kubenswrapper[4879]: I1125 15:10:56.997905 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5e87bea-0848-477e-b207-0aaac4e1e63a" (UID: "a5e87bea-0848-477e-b207-0aaac4e1e63a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.021729 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.021780 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5e87bea-0848-477e-b207-0aaac4e1e63a-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.021792 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj8pp\" (UniqueName: \"kubernetes.io/projected/a5e87bea-0848-477e-b207-0aaac4e1e63a-kube-api-access-xj8pp\") on node \"crc\" DevicePath \"\"" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.230168 4879 generic.go:334] "Generic (PLEG): container finished" podID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerID="9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0" exitCode=0 Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.230349 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerDied","Data":"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0"} Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.230386 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdl65" event={"ID":"a5e87bea-0848-477e-b207-0aaac4e1e63a","Type":"ContainerDied","Data":"eb9798244c4fe54c1379f3e2f7768a4ed62db2bf8e25963c812e4dacfa83e20d"} Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.230388 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fdl65" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.230432 4879 scope.go:117] "RemoveContainer" containerID="9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.272534 4879 scope.go:117] "RemoveContainer" containerID="fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.277052 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.289591 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fdl65"] Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.300943 4879 scope.go:117] "RemoveContainer" containerID="f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.377699 4879 scope.go:117] "RemoveContainer" containerID="9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0" Nov 25 15:10:57 crc kubenswrapper[4879]: E1125 15:10:57.378335 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0\": container with ID starting with 9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0 not found: ID does not exist" containerID="9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.378383 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0"} err="failed to get container status \"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0\": rpc error: code = NotFound desc = could not find container \"9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0\": container with ID starting with 9096ef50c7bf08f87e8e2e17e3849cae5ef48e0ca225601a0695933062f6d5a0 not found: ID does not exist" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.378415 4879 scope.go:117] "RemoveContainer" containerID="fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074" Nov 25 15:10:57 crc kubenswrapper[4879]: E1125 15:10:57.378720 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074\": container with ID starting with fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074 not found: ID does not exist" containerID="fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.378772 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074"} err="failed to get container status \"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074\": rpc error: code = NotFound desc = could not find container \"fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074\": container with ID starting with fb81129d3a5f494fc04392a6164af8258748421706d49145a1478568324d3074 not found: ID does not exist" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.378807 4879 scope.go:117] "RemoveContainer" 
containerID="f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc" Nov 25 15:10:57 crc kubenswrapper[4879]: E1125 15:10:57.379405 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc\": container with ID starting with f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc not found: ID does not exist" containerID="f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.379447 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc"} err="failed to get container status \"f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc\": rpc error: code = NotFound desc = could not find container \"f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc\": container with ID starting with f5dadffb4a2c952d89f83d12f713f03daf9f1bbb6b5817e292421e21edf362fc not found: ID does not exist" Nov 25 15:10:57 crc kubenswrapper[4879]: I1125 15:10:57.654698 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" path="/var/lib/kubelet/pods/a5e87bea-0848-477e-b207-0aaac4e1e63a/volumes" Nov 25 15:11:17 crc kubenswrapper[4879]: I1125 15:11:17.408298 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:11:17 crc kubenswrapper[4879]: I1125 15:11:17.408738 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:11:47 crc kubenswrapper[4879]: I1125 15:11:47.409326 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:11:47 crc kubenswrapper[4879]: I1125 15:11:47.409898 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.409462 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.410790 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.410909 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.412091 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.412276 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4" gracePeriod=600 Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.862805 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4" exitCode=0 Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.862909 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4"} Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.863084 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f"} Nov 25 15:12:17 crc kubenswrapper[4879]: I1125 15:12:17.863109 4879 scope.go:117] "RemoveContainer" containerID="d404551d9bcb2d81e77e837cf6919d9d8bf3d70b2fbf8a718c8a637f43b080b9" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.033541 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:04 crc kubenswrapper[4879]: E1125 15:13:04.034372 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="extract-content" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.034387 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="extract-content" Nov 25 15:13:04 crc kubenswrapper[4879]: E1125 15:13:04.034395 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="registry-server" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.034401 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="registry-server" Nov 25 15:13:04 crc kubenswrapper[4879]: E1125 15:13:04.034416 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="extract-utilities" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.034424 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="extract-utilities" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.034597 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5e87bea-0848-477e-b207-0aaac4e1e63a" containerName="registry-server" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.036146 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.052072 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.139657 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.139832 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhzs8\" (UniqueName: \"kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.139890 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.241301 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.241378 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhzs8\" (UniqueName: \"kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.241401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.241867 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.241976 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.272185 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhzs8\" (UniqueName: \"kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8\") pod \"redhat-marketplace-tzw5v\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.361332 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:04 crc kubenswrapper[4879]: I1125 15:13:04.894028 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:05 crc kubenswrapper[4879]: I1125 15:13:05.271818 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerStarted","Data":"4f3335f3e39c24ae19dbaf3dd3b4421a56c7b94f8ca4d03b0bc1bf668cbcca72"} Nov 25 15:13:05 crc kubenswrapper[4879]: I1125 15:13:05.272141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerStarted","Data":"361e3ec93ba365f3c7088ab9f8b38cf73f9df1cea4ea0d31ebc155d885ccd454"} Nov 25 15:13:06 crc kubenswrapper[4879]: I1125 15:13:06.279360 4879 generic.go:334] "Generic (PLEG): container finished" podID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerID="4f3335f3e39c24ae19dbaf3dd3b4421a56c7b94f8ca4d03b0bc1bf668cbcca72" exitCode=0 Nov 25 15:13:06 crc kubenswrapper[4879]: I1125 15:13:06.279402 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerDied","Data":"4f3335f3e39c24ae19dbaf3dd3b4421a56c7b94f8ca4d03b0bc1bf668cbcca72"} Nov 25 15:13:07 crc kubenswrapper[4879]: I1125 15:13:07.288206 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerStarted","Data":"c1e8aecd79e39a15e07cbabafe05a7b196a3331b95ccf0a5f6e06d405f8dc767"} Nov 25 15:13:08 crc kubenswrapper[4879]: I1125 15:13:08.297476 4879 generic.go:334] "Generic (PLEG): container finished" podID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerID="c1e8aecd79e39a15e07cbabafe05a7b196a3331b95ccf0a5f6e06d405f8dc767" exitCode=0 Nov 25 15:13:08 crc kubenswrapper[4879]: I1125 15:13:08.297541 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerDied","Data":"c1e8aecd79e39a15e07cbabafe05a7b196a3331b95ccf0a5f6e06d405f8dc767"} Nov 25 15:13:11 crc kubenswrapper[4879]: I1125 15:13:11.326557 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerStarted","Data":"99b8a0cc2a5ac7fba9768692d0353e329693b00bf30bdc7b0712674c5aaadba8"} Nov 25 15:13:11 crc kubenswrapper[4879]: I1125 
15:13:11.348160 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tzw5v" podStartSLOduration=3.24379989 podStartE2EDuration="7.348125291s" podCreationTimestamp="2025-11-25 15:13:04 +0000 UTC" firstStartedPulling="2025-11-25 15:13:06.28091485 +0000 UTC m=+2877.884327921" lastFinishedPulling="2025-11-25 15:13:10.385240251 +0000 UTC m=+2881.988653322" observedRunningTime="2025-11-25 15:13:11.345457119 +0000 UTC m=+2882.948870190" watchObservedRunningTime="2025-11-25 15:13:11.348125291 +0000 UTC m=+2882.951538362" Nov 25 15:13:14 crc kubenswrapper[4879]: I1125 15:13:14.361792 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:14 crc kubenswrapper[4879]: I1125 15:13:14.362079 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:14 crc kubenswrapper[4879]: I1125 15:13:14.403919 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:15 crc kubenswrapper[4879]: I1125 15:13:15.394354 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:16 crc kubenswrapper[4879]: I1125 15:13:16.047358 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:17 crc kubenswrapper[4879]: I1125 15:13:17.372554 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tzw5v" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="registry-server" containerID="cri-o://99b8a0cc2a5ac7fba9768692d0353e329693b00bf30bdc7b0712674c5aaadba8" gracePeriod=2 Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.383225 4879 generic.go:334] "Generic (PLEG): container finished" podID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerID="99b8a0cc2a5ac7fba9768692d0353e329693b00bf30bdc7b0712674c5aaadba8" exitCode=0 Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.383344 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerDied","Data":"99b8a0cc2a5ac7fba9768692d0353e329693b00bf30bdc7b0712674c5aaadba8"} Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.383660 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tzw5v" event={"ID":"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999","Type":"ContainerDied","Data":"361e3ec93ba365f3c7088ab9f8b38cf73f9df1cea4ea0d31ebc155d885ccd454"} Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.383681 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="361e3ec93ba365f3c7088ab9f8b38cf73f9df1cea4ea0d31ebc155d885ccd454" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.428844 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.501784 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content\") pod \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.501868 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhzs8\" (UniqueName: \"kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8\") pod \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.501935 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities\") pod \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\" (UID: \"3eaf0bd2-c206-4f35-9b6a-1353d4e0f999\") " Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.503106 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities" (OuterVolumeSpecName: "utilities") pod "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" (UID: "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.510516 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8" (OuterVolumeSpecName: "kube-api-access-lhzs8") pod "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" (UID: "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999"). InnerVolumeSpecName "kube-api-access-lhzs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.521705 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" (UID: "3eaf0bd2-c206-4f35-9b6a-1353d4e0f999"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.603587 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.603624 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhzs8\" (UniqueName: \"kubernetes.io/projected/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-kube-api-access-lhzs8\") on node \"crc\" DevicePath \"\"" Nov 25 15:13:18 crc kubenswrapper[4879]: I1125 15:13:18.603640 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:13:19 crc kubenswrapper[4879]: I1125 15:13:19.392179 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tzw5v" Nov 25 15:13:19 crc kubenswrapper[4879]: I1125 15:13:19.432464 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:19 crc kubenswrapper[4879]: I1125 15:13:19.440463 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tzw5v"] Nov 25 15:13:19 crc kubenswrapper[4879]: I1125 15:13:19.655052 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" path="/var/lib/kubelet/pods/3eaf0bd2-c206-4f35-9b6a-1353d4e0f999/volumes" Nov 25 15:14:17 crc kubenswrapper[4879]: I1125 15:14:17.408796 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:14:17 crc kubenswrapper[4879]: I1125 15:14:17.409479 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:14:47 crc kubenswrapper[4879]: I1125 15:14:47.409343 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:14:47 crc kubenswrapper[4879]: I1125 15:14:47.410214 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.142733 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt"] Nov 25 15:15:00 crc kubenswrapper[4879]: E1125 15:15:00.143638 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="extract-utilities" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.143679 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="extract-utilities" Nov 25 15:15:00 crc kubenswrapper[4879]: E1125 15:15:00.143694 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="registry-server" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.143703 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="registry-server" Nov 25 15:15:00 crc kubenswrapper[4879]: E1125 15:15:00.143718 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="extract-content" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.143726 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="extract-content" 
Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.143905 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eaf0bd2-c206-4f35-9b6a-1353d4e0f999" containerName="registry-server" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.144494 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.148185 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.148238 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.157461 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt"] Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.267819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrh2k\" (UniqueName: \"kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.267870 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.267937 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.369423 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrh2k\" (UniqueName: \"kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.369472 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.369532 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.370418 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.376502 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.386162 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrh2k\" (UniqueName: \"kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k\") pod \"collect-profiles-29401395-rzrqt\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.465150 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:00 crc kubenswrapper[4879]: I1125 15:15:00.919636 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt"] Nov 25 15:15:01 crc kubenswrapper[4879]: I1125 15:15:01.231727 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" event={"ID":"6120e86e-3bfb-4c54-a197-86517ce67e78","Type":"ContainerStarted","Data":"f8d19dc2d3bcf3d4780d842233ea695dc8c6ca07772ea232477e3e9c41c291df"} Nov 25 15:15:01 crc kubenswrapper[4879]: I1125 15:15:01.232102 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" event={"ID":"6120e86e-3bfb-4c54-a197-86517ce67e78","Type":"ContainerStarted","Data":"da1dcc0f09fda6fc606035f03f4e35a617a80acc3934dec3f81851363499728e"} Nov 25 15:15:01 crc kubenswrapper[4879]: I1125 15:15:01.256524 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" podStartSLOduration=1.256504009 podStartE2EDuration="1.256504009s" podCreationTimestamp="2025-11-25 15:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:15:01.25207303 +0000 UTC m=+2992.855486101" watchObservedRunningTime="2025-11-25 15:15:01.256504009 +0000 UTC m=+2992.859917080" Nov 25 15:15:02 crc kubenswrapper[4879]: I1125 15:15:02.241609 4879 generic.go:334] "Generic (PLEG): container finished" podID="6120e86e-3bfb-4c54-a197-86517ce67e78" containerID="f8d19dc2d3bcf3d4780d842233ea695dc8c6ca07772ea232477e3e9c41c291df" exitCode=0 Nov 25 15:15:02 crc kubenswrapper[4879]: I1125 15:15:02.241671 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" 
event={"ID":"6120e86e-3bfb-4c54-a197-86517ce67e78","Type":"ContainerDied","Data":"f8d19dc2d3bcf3d4780d842233ea695dc8c6ca07772ea232477e3e9c41c291df"} Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.562804 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.618485 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume\") pod \"6120e86e-3bfb-4c54-a197-86517ce67e78\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.618534 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrh2k\" (UniqueName: \"kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k\") pod \"6120e86e-3bfb-4c54-a197-86517ce67e78\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.618608 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume\") pod \"6120e86e-3bfb-4c54-a197-86517ce67e78\" (UID: \"6120e86e-3bfb-4c54-a197-86517ce67e78\") " Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.619797 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume" (OuterVolumeSpecName: "config-volume") pod "6120e86e-3bfb-4c54-a197-86517ce67e78" (UID: "6120e86e-3bfb-4c54-a197-86517ce67e78"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.624530 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6120e86e-3bfb-4c54-a197-86517ce67e78" (UID: "6120e86e-3bfb-4c54-a197-86517ce67e78"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.624604 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k" (OuterVolumeSpecName: "kube-api-access-vrh2k") pod "6120e86e-3bfb-4c54-a197-86517ce67e78" (UID: "6120e86e-3bfb-4c54-a197-86517ce67e78"). InnerVolumeSpecName "kube-api-access-vrh2k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.720061 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6120e86e-3bfb-4c54-a197-86517ce67e78-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.720092 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6120e86e-3bfb-4c54-a197-86517ce67e78-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:15:03 crc kubenswrapper[4879]: I1125 15:15:03.720105 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrh2k\" (UniqueName: \"kubernetes.io/projected/6120e86e-3bfb-4c54-a197-86517ce67e78-kube-api-access-vrh2k\") on node \"crc\" DevicePath \"\"" Nov 25 15:15:04 crc kubenswrapper[4879]: I1125 15:15:04.264373 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" event={"ID":"6120e86e-3bfb-4c54-a197-86517ce67e78","Type":"ContainerDied","Data":"da1dcc0f09fda6fc606035f03f4e35a617a80acc3934dec3f81851363499728e"} Nov 25 15:15:04 crc kubenswrapper[4879]: I1125 15:15:04.264637 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da1dcc0f09fda6fc606035f03f4e35a617a80acc3934dec3f81851363499728e" Nov 25 15:15:04 crc kubenswrapper[4879]: I1125 15:15:04.264510 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt" Nov 25 15:15:04 crc kubenswrapper[4879]: I1125 15:15:04.320062 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6"] Nov 25 15:15:04 crc kubenswrapper[4879]: I1125 15:15:04.325961 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401350-mn6j6"] Nov 25 15:15:05 crc kubenswrapper[4879]: I1125 15:15:05.663687 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36609869-72a6-4813-bc8b-ea8c53cedf05" path="/var/lib/kubelet/pods/36609869-72a6-4813-bc8b-ea8c53cedf05/volumes" Nov 25 15:15:17 crc kubenswrapper[4879]: I1125 15:15:17.408761 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:15:17 crc kubenswrapper[4879]: I1125 15:15:17.409309 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:15:17 crc kubenswrapper[4879]: I1125 15:15:17.409350 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:15:17 crc kubenswrapper[4879]: I1125 15:15:17.409933 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f"} 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:15:17 crc kubenswrapper[4879]: I1125 15:15:17.409985 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" gracePeriod=600 Nov 25 15:15:18 crc kubenswrapper[4879]: E1125 15:15:18.075319 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:15:18 crc kubenswrapper[4879]: I1125 15:15:18.409457 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" exitCode=0 Nov 25 15:15:18 crc kubenswrapper[4879]: I1125 15:15:18.409511 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f"} Nov 25 15:15:18 crc kubenswrapper[4879]: I1125 15:15:18.409563 4879 scope.go:117] "RemoveContainer" containerID="d93f5cfe8db7c71c027309f43f73ec932ef734072eb2e85e891da7db120da0e4" Nov 25 15:15:18 crc kubenswrapper[4879]: I1125 15:15:18.410021 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:15:18 crc kubenswrapper[4879]: E1125 15:15:18.410656 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:15:33 crc kubenswrapper[4879]: I1125 15:15:33.645892 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:15:33 crc kubenswrapper[4879]: E1125 15:15:33.646855 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:15:41 crc kubenswrapper[4879]: I1125 15:15:41.320975 4879 scope.go:117] "RemoveContainer" containerID="bfd36f9216857cd722ada09d2056012fe10804d8a8ffa1764defc018b6a0a8dc" Nov 25 15:15:48 crc kubenswrapper[4879]: I1125 15:15:48.644377 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:15:48 crc 
kubenswrapper[4879]: E1125 15:15:48.646050 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.101368 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:15:52 crc kubenswrapper[4879]: E1125 15:15:52.102037 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6120e86e-3bfb-4c54-a197-86517ce67e78" containerName="collect-profiles" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.102052 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6120e86e-3bfb-4c54-a197-86517ce67e78" containerName="collect-profiles" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.102253 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6120e86e-3bfb-4c54-a197-86517ce67e78" containerName="collect-profiles" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.103341 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.114573 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.229977 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr5c8\" (UniqueName: \"kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.230324 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.230441 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.303893 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.306619 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.313410 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.331303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.331597 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.331740 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.331747 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr5c8\" (UniqueName: \"kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.332092 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.362910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr5c8\" (UniqueName: \"kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8\") pod \"certified-operators-p7tnn\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.423406 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.433381 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pgd5\" (UniqueName: \"kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.433824 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.433842 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.536211 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pgd5\" (UniqueName: \"kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.536306 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.536330 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.536948 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.537023 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities\") pod \"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.554784 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pgd5\" (UniqueName: \"kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5\") pod 
\"community-operators-d7h2c\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.625469 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:15:52 crc kubenswrapper[4879]: I1125 15:15:52.797236 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.222461 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:15:53 crc kubenswrapper[4879]: W1125 15:15:53.225756 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29ed20e9_a80c_4c4d_9ff6_474496f0260d.slice/crio-f51037ae48547dd8fed0ab6ed1d6c7abd2f22a34bd36e9c3ff0318a805353648 WatchSource:0}: Error finding container f51037ae48547dd8fed0ab6ed1d6c7abd2f22a34bd36e9c3ff0318a805353648: Status 404 returned error can't find the container with id f51037ae48547dd8fed0ab6ed1d6c7abd2f22a34bd36e9c3ff0318a805353648 Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.724958 4879 generic.go:334] "Generic (PLEG): container finished" podID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerID="8efae558613dce00c5edefa1860ecf6ee6aafc2bc7c10fd04fd19244c3861a91" exitCode=0 Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.725040 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7h2c" event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerDied","Data":"8efae558613dce00c5edefa1860ecf6ee6aafc2bc7c10fd04fd19244c3861a91"} Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.725744 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7h2c" event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerStarted","Data":"f51037ae48547dd8fed0ab6ed1d6c7abd2f22a34bd36e9c3ff0318a805353648"} Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.729023 4879 generic.go:334] "Generic (PLEG): container finished" podID="41f47924-5139-4705-b290-6c1c738a53a7" containerID="a677fe0651584180798808d59f83f44cb51bdf688f62bb71cd763e8c5b237b6a" exitCode=0 Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.729057 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerDied","Data":"a677fe0651584180798808d59f83f44cb51bdf688f62bb71cd763e8c5b237b6a"} Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.729077 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerStarted","Data":"62850763e9434f1ed06b47713222ec01c9d09d39009c10864a2aadaaa7fabff2"} Nov 25 15:15:53 crc kubenswrapper[4879]: I1125 15:15:53.730169 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:15:54 crc kubenswrapper[4879]: I1125 15:15:54.740238 4879 generic.go:334] "Generic (PLEG): container finished" podID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerID="0b29e6949ad56b534f4d71eea701390c13d92883ff4763df679802995c9f9efc" exitCode=0 Nov 25 15:15:54 crc kubenswrapper[4879]: I1125 15:15:54.741233 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-d7h2c" event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerDied","Data":"0b29e6949ad56b534f4d71eea701390c13d92883ff4763df679802995c9f9efc"} Nov 25 15:15:54 crc kubenswrapper[4879]: I1125 15:15:54.746595 4879 generic.go:334] "Generic (PLEG): container finished" podID="41f47924-5139-4705-b290-6c1c738a53a7" containerID="5728ac206cbf1efbf074496df5f4f65a474a6b62c5eb9bb786fcf533c296f58c" exitCode=0 Nov 25 15:15:54 crc kubenswrapper[4879]: I1125 15:15:54.746628 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerDied","Data":"5728ac206cbf1efbf074496df5f4f65a474a6b62c5eb9bb786fcf533c296f58c"} Nov 25 15:15:55 crc kubenswrapper[4879]: I1125 15:15:55.761801 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7h2c" event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerStarted","Data":"2204ccea29be48c19ed45e90e25c850fe9946ff84d5c88b570d4afdf08ce8ed9"} Nov 25 15:15:55 crc kubenswrapper[4879]: I1125 15:15:55.764636 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerStarted","Data":"83e2c974fbaab9c1dfe0ee21e06416eb436c0a294492d10ab3c8c918b0ba4aa2"} Nov 25 15:15:55 crc kubenswrapper[4879]: I1125 15:15:55.784140 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d7h2c" podStartSLOduration=2.33135892 podStartE2EDuration="3.784102893s" podCreationTimestamp="2025-11-25 15:15:52 +0000 UTC" firstStartedPulling="2025-11-25 15:15:53.72988135 +0000 UTC m=+3045.333294421" lastFinishedPulling="2025-11-25 15:15:55.182625323 +0000 UTC m=+3046.786038394" observedRunningTime="2025-11-25 15:15:55.779583101 +0000 UTC m=+3047.382996172" watchObservedRunningTime="2025-11-25 15:15:55.784102893 +0000 UTC m=+3047.387515964" Nov 25 15:15:55 crc kubenswrapper[4879]: I1125 15:15:55.803808 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p7tnn" podStartSLOduration=2.378585817 podStartE2EDuration="3.803792815s" podCreationTimestamp="2025-11-25 15:15:52 +0000 UTC" firstStartedPulling="2025-11-25 15:15:53.731992078 +0000 UTC m=+3045.335405149" lastFinishedPulling="2025-11-25 15:15:55.157199076 +0000 UTC m=+3046.760612147" observedRunningTime="2025-11-25 15:15:55.7984049 +0000 UTC m=+3047.401817991" watchObservedRunningTime="2025-11-25 15:15:55.803792815 +0000 UTC m=+3047.407205876" Nov 25 15:16:00 crc kubenswrapper[4879]: I1125 15:16:00.645254 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:16:00 crc kubenswrapper[4879]: E1125 15:16:00.646341 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.423572 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p7tnn" 
Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.423976 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.492059 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.625846 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.625923 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.695302 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.867019 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:16:02 crc kubenswrapper[4879]: I1125 15:16:02.870664 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:05 crc kubenswrapper[4879]: I1125 15:16:05.497525 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:16:05 crc kubenswrapper[4879]: I1125 15:16:05.498690 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p7tnn" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="registry-server" containerID="cri-o://83e2c974fbaab9c1dfe0ee21e06416eb436c0a294492d10ab3c8c918b0ba4aa2" gracePeriod=2 Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.093450 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.093679 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d7h2c" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="registry-server" containerID="cri-o://2204ccea29be48c19ed45e90e25c850fe9946ff84d5c88b570d4afdf08ce8ed9" gracePeriod=2 Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.876168 4879 generic.go:334] "Generic (PLEG): container finished" podID="41f47924-5139-4705-b290-6c1c738a53a7" containerID="83e2c974fbaab9c1dfe0ee21e06416eb436c0a294492d10ab3c8c918b0ba4aa2" exitCode=0 Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.876183 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerDied","Data":"83e2c974fbaab9c1dfe0ee21e06416eb436c0a294492d10ab3c8c918b0ba4aa2"} Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.879850 4879 generic.go:334] "Generic (PLEG): container finished" podID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerID="2204ccea29be48c19ed45e90e25c850fe9946ff84d5c88b570d4afdf08ce8ed9" exitCode=0 Nov 25 15:16:06 crc kubenswrapper[4879]: I1125 15:16:06.880011 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7h2c" 
event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerDied","Data":"2204ccea29be48c19ed45e90e25c850fe9946ff84d5c88b570d4afdf08ce8ed9"} Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.143366 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.147862 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.213776 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pgd5\" (UniqueName: \"kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5\") pod \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.213911 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities\") pod \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.213957 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content\") pod \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\" (UID: \"29ed20e9-a80c-4c4d-9ff6-474496f0260d\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.213992 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr5c8\" (UniqueName: \"kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8\") pod \"41f47924-5139-4705-b290-6c1c738a53a7\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.214084 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content\") pod \"41f47924-5139-4705-b290-6c1c738a53a7\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.214139 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities\") pod \"41f47924-5139-4705-b290-6c1c738a53a7\" (UID: \"41f47924-5139-4705-b290-6c1c738a53a7\") " Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.216603 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities" (OuterVolumeSpecName: "utilities") pod "41f47924-5139-4705-b290-6c1c738a53a7" (UID: "41f47924-5139-4705-b290-6c1c738a53a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.217244 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities" (OuterVolumeSpecName: "utilities") pod "29ed20e9-a80c-4c4d-9ff6-474496f0260d" (UID: "29ed20e9-a80c-4c4d-9ff6-474496f0260d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.222963 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8" (OuterVolumeSpecName: "kube-api-access-lr5c8") pod "41f47924-5139-4705-b290-6c1c738a53a7" (UID: "41f47924-5139-4705-b290-6c1c738a53a7"). InnerVolumeSpecName "kube-api-access-lr5c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.224158 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5" (OuterVolumeSpecName: "kube-api-access-4pgd5") pod "29ed20e9-a80c-4c4d-9ff6-474496f0260d" (UID: "29ed20e9-a80c-4c4d-9ff6-474496f0260d"). InnerVolumeSpecName "kube-api-access-4pgd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.273531 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29ed20e9-a80c-4c4d-9ff6-474496f0260d" (UID: "29ed20e9-a80c-4c4d-9ff6-474496f0260d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.276469 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41f47924-5139-4705-b290-6c1c738a53a7" (UID: "41f47924-5139-4705-b290-6c1c738a53a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317573 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr5c8\" (UniqueName: \"kubernetes.io/projected/41f47924-5139-4705-b290-6c1c738a53a7-kube-api-access-lr5c8\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317613 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317662 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41f47924-5139-4705-b290-6c1c738a53a7-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317675 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pgd5\" (UniqueName: \"kubernetes.io/projected/29ed20e9-a80c-4c4d-9ff6-474496f0260d-kube-api-access-4pgd5\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317686 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.317697 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29ed20e9-a80c-4c4d-9ff6-474496f0260d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.889167 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p7tnn" event={"ID":"41f47924-5139-4705-b290-6c1c738a53a7","Type":"ContainerDied","Data":"62850763e9434f1ed06b47713222ec01c9d09d39009c10864a2aadaaa7fabff2"} Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.889240 4879 scope.go:117] "RemoveContainer" containerID="83e2c974fbaab9c1dfe0ee21e06416eb436c0a294492d10ab3c8c918b0ba4aa2" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.889276 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p7tnn" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.892823 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d7h2c" event={"ID":"29ed20e9-a80c-4c4d-9ff6-474496f0260d","Type":"ContainerDied","Data":"f51037ae48547dd8fed0ab6ed1d6c7abd2f22a34bd36e9c3ff0318a805353648"} Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.893025 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d7h2c" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.916578 4879 scope.go:117] "RemoveContainer" containerID="5728ac206cbf1efbf074496df5f4f65a474a6b62c5eb9bb786fcf533c296f58c" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.920406 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.938245 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p7tnn"] Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.938339 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.945591 4879 scope.go:117] "RemoveContainer" containerID="a677fe0651584180798808d59f83f44cb51bdf688f62bb71cd763e8c5b237b6a" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.945688 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d7h2c"] Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.964891 4879 scope.go:117] "RemoveContainer" containerID="2204ccea29be48c19ed45e90e25c850fe9946ff84d5c88b570d4afdf08ce8ed9" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.978798 4879 scope.go:117] "RemoveContainer" containerID="0b29e6949ad56b534f4d71eea701390c13d92883ff4763df679802995c9f9efc" Nov 25 15:16:07 crc kubenswrapper[4879]: I1125 15:16:07.998713 4879 scope.go:117] "RemoveContainer" containerID="8efae558613dce00c5edefa1860ecf6ee6aafc2bc7c10fd04fd19244c3861a91" Nov 25 15:16:09 crc kubenswrapper[4879]: I1125 15:16:09.655080 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" path="/var/lib/kubelet/pods/29ed20e9-a80c-4c4d-9ff6-474496f0260d/volumes" Nov 25 15:16:09 crc kubenswrapper[4879]: I1125 15:16:09.656472 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41f47924-5139-4705-b290-6c1c738a53a7" path="/var/lib/kubelet/pods/41f47924-5139-4705-b290-6c1c738a53a7/volumes" Nov 25 15:16:13 crc kubenswrapper[4879]: I1125 15:16:13.645231 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:16:13 crc kubenswrapper[4879]: E1125 15:16:13.645911 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:16:28 crc kubenswrapper[4879]: I1125 15:16:28.644539 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:16:28 crc kubenswrapper[4879]: E1125 15:16:28.645331 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:16:42 crc kubenswrapper[4879]: 
I1125 15:16:42.644763 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:16:42 crc kubenswrapper[4879]: E1125 15:16:42.645724 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:16:56 crc kubenswrapper[4879]: I1125 15:16:56.644887 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:16:56 crc kubenswrapper[4879]: E1125 15:16:56.645632 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:17:08 crc kubenswrapper[4879]: I1125 15:17:08.645924 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:17:08 crc kubenswrapper[4879]: E1125 15:17:08.647390 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:17:20 crc kubenswrapper[4879]: I1125 15:17:20.645808 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:17:20 crc kubenswrapper[4879]: E1125 15:17:20.647306 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:17:32 crc kubenswrapper[4879]: I1125 15:17:32.644745 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:17:32 crc kubenswrapper[4879]: E1125 15:17:32.645459 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:17:44 crc kubenswrapper[4879]: I1125 15:17:44.645047 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:17:44 crc kubenswrapper[4879]: E1125 
15:17:44.646327 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:17:56 crc kubenswrapper[4879]: I1125 15:17:56.644936 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:17:56 crc kubenswrapper[4879]: E1125 15:17:56.645774 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:18:07 crc kubenswrapper[4879]: I1125 15:18:07.645360 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:18:07 crc kubenswrapper[4879]: E1125 15:18:07.646116 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:18:21 crc kubenswrapper[4879]: I1125 15:18:21.644916 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:18:21 crc kubenswrapper[4879]: E1125 15:18:21.645737 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:18:36 crc kubenswrapper[4879]: I1125 15:18:36.644653 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:18:36 crc kubenswrapper[4879]: E1125 15:18:36.645292 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:18:49 crc kubenswrapper[4879]: I1125 15:18:49.650879 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:18:49 crc kubenswrapper[4879]: E1125 15:18:49.651684 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:19:00 crc kubenswrapper[4879]: I1125 15:19:00.645271 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:19:00 crc kubenswrapper[4879]: E1125 15:19:00.647194 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:19:15 crc kubenswrapper[4879]: I1125 15:19:15.644681 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:19:15 crc kubenswrapper[4879]: E1125 15:19:15.645382 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:19:26 crc kubenswrapper[4879]: I1125 15:19:26.644304 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:19:26 crc kubenswrapper[4879]: E1125 15:19:26.645015 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:19:40 crc kubenswrapper[4879]: I1125 15:19:40.644140 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:19:40 crc kubenswrapper[4879]: E1125 15:19:40.644891 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:19:41 crc kubenswrapper[4879]: I1125 15:19:41.444903 4879 scope.go:117] "RemoveContainer" containerID="99b8a0cc2a5ac7fba9768692d0353e329693b00bf30bdc7b0712674c5aaadba8" Nov 25 15:19:41 crc kubenswrapper[4879]: I1125 15:19:41.476189 4879 scope.go:117] "RemoveContainer" containerID="4f3335f3e39c24ae19dbaf3dd3b4421a56c7b94f8ca4d03b0bc1bf668cbcca72" Nov 25 15:19:41 crc kubenswrapper[4879]: I1125 15:19:41.494571 4879 scope.go:117] "RemoveContainer" containerID="c1e8aecd79e39a15e07cbabafe05a7b196a3331b95ccf0a5f6e06d405f8dc767" Nov 25 15:19:51 crc kubenswrapper[4879]: I1125 15:19:51.645082 4879 
scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:19:51 crc kubenswrapper[4879]: E1125 15:19:51.645789 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:20:02 crc kubenswrapper[4879]: I1125 15:20:02.645467 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:20:02 crc kubenswrapper[4879]: E1125 15:20:02.646486 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:20:14 crc kubenswrapper[4879]: I1125 15:20:14.645562 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:20:14 crc kubenswrapper[4879]: E1125 15:20:14.646390 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:20:26 crc kubenswrapper[4879]: I1125 15:20:26.644893 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f" Nov 25 15:20:27 crc kubenswrapper[4879]: I1125 15:20:27.043881 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879"} Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.896730 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"] Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898716 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="extract-content" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898745 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="extract-content" Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898760 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898770 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898784 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="extract-utilities" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898796 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="extract-utilities" Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898826 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898835 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898876 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="extract-utilities" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898889 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="extract-utilities" Nov 25 15:21:57 crc kubenswrapper[4879]: E1125 15:21:57.898950 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="extract-content" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.898964 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="extract-content" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.899227 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="41f47924-5139-4705-b290-6c1c738a53a7" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.899275 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ed20e9-a80c-4c4d-9ff6-474496f0260d" containerName="registry-server" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.901451 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.913401 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"] Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.995265 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.996075 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z5hk\" (UniqueName: \"kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:57 crc kubenswrapper[4879]: I1125 15:21:57.996293 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.098154 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.098219 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z5hk\" (UniqueName: \"kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.098249 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.098808 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.098894 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.121057 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8z5hk\" (UniqueName: \"kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk\") pod \"redhat-operators-fct7l\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.231838 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.640345 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"] Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.838363 4879 generic.go:334] "Generic (PLEG): container finished" podID="12f8d906-802f-4a83-a324-f8a8989472e2" containerID="2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa" exitCode=0 Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.838411 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerDied","Data":"2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa"} Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.838441 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerStarted","Data":"02c6187d3349fbf5cb081b2c992e85f422d7aae4d974a177df8417fca3c8b530"} Nov 25 15:21:58 crc kubenswrapper[4879]: I1125 15:21:58.840084 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:22:00 crc kubenswrapper[4879]: I1125 15:22:00.861753 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerStarted","Data":"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e"} Nov 25 15:22:01 crc kubenswrapper[4879]: I1125 15:22:01.874637 4879 generic.go:334] "Generic (PLEG): container finished" podID="12f8d906-802f-4a83-a324-f8a8989472e2" containerID="3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e" exitCode=0 Nov 25 15:22:01 crc kubenswrapper[4879]: I1125 15:22:01.874795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerDied","Data":"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e"} Nov 25 15:22:02 crc kubenswrapper[4879]: I1125 15:22:02.885377 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerStarted","Data":"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4"} Nov 25 15:22:02 crc kubenswrapper[4879]: I1125 15:22:02.911473 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fct7l" podStartSLOduration=2.392157271 podStartE2EDuration="5.911446976s" podCreationTimestamp="2025-11-25 15:21:57 +0000 UTC" firstStartedPulling="2025-11-25 15:21:58.839807596 +0000 UTC m=+3410.443220667" lastFinishedPulling="2025-11-25 15:22:02.359097291 +0000 UTC m=+3413.962510372" observedRunningTime="2025-11-25 15:22:02.908066369 +0000 UTC m=+3414.511479450" watchObservedRunningTime="2025-11-25 15:22:02.911446976 +0000 UTC m=+3414.514860047" Nov 25 15:22:08 crc 
kubenswrapper[4879]: I1125 15:22:08.232832 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:08 crc kubenswrapper[4879]: I1125 15:22:08.233201 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:08 crc kubenswrapper[4879]: I1125 15:22:08.271919 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:09 crc kubenswrapper[4879]: I1125 15:22:09.006735 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:09 crc kubenswrapper[4879]: I1125 15:22:09.087626 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"] Nov 25 15:22:10 crc kubenswrapper[4879]: I1125 15:22:10.965521 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fct7l" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="registry-server" containerID="cri-o://a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4" gracePeriod=2 Nov 25 15:22:11 crc kubenswrapper[4879]: E1125 15:22:11.155587 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12f8d906_802f_4a83_a324_f8a8989472e2.slice/crio-a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12f8d906_802f_4a83_a324_f8a8989472e2.slice/crio-conmon-a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4.scope\": RecentStats: unable to find data in memory cache]" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.346392 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.398382 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities\") pod \"12f8d906-802f-4a83-a324-f8a8989472e2\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.398432 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content\") pod \"12f8d906-802f-4a83-a324-f8a8989472e2\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.398586 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z5hk\" (UniqueName: \"kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk\") pod \"12f8d906-802f-4a83-a324-f8a8989472e2\" (UID: \"12f8d906-802f-4a83-a324-f8a8989472e2\") " Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.399660 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities" (OuterVolumeSpecName: "utilities") pod "12f8d906-802f-4a83-a324-f8a8989472e2" (UID: "12f8d906-802f-4a83-a324-f8a8989472e2"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.403736 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk" (OuterVolumeSpecName: "kube-api-access-8z5hk") pod "12f8d906-802f-4a83-a324-f8a8989472e2" (UID: "12f8d906-802f-4a83-a324-f8a8989472e2"). InnerVolumeSpecName "kube-api-access-8z5hk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.500602 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.500639 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z5hk\" (UniqueName: \"kubernetes.io/projected/12f8d906-802f-4a83-a324-f8a8989472e2-kube-api-access-8z5hk\") on node \"crc\" DevicePath \"\"" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.973894 4879 generic.go:334] "Generic (PLEG): container finished" podID="12f8d906-802f-4a83-a324-f8a8989472e2" containerID="a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4" exitCode=0 Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.973970 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerDied","Data":"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4"} Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.974001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fct7l" event={"ID":"12f8d906-802f-4a83-a324-f8a8989472e2","Type":"ContainerDied","Data":"02c6187d3349fbf5cb081b2c992e85f422d7aae4d974a177df8417fca3c8b530"} Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.974001 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fct7l" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.974022 4879 scope.go:117] "RemoveContainer" containerID="a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4" Nov 25 15:22:11 crc kubenswrapper[4879]: I1125 15:22:11.994490 4879 scope.go:117] "RemoveContainer" containerID="3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.011963 4879 scope.go:117] "RemoveContainer" containerID="2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.039690 4879 scope.go:117] "RemoveContainer" containerID="a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4" Nov 25 15:22:12 crc kubenswrapper[4879]: E1125 15:22:12.040821 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4\": container with ID starting with a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4 not found: ID does not exist" containerID="a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.040863 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4"} err="failed to get container status \"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4\": rpc error: code = NotFound desc = could not find container \"a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4\": container with ID starting with a1d9f6332da59d1d173f38922c6b75c942d1dbf364716fc0eecbe24a8d6eb6c4 not found: ID does not exist" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.040889 4879 scope.go:117] "RemoveContainer" containerID="3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e" Nov 25 15:22:12 crc kubenswrapper[4879]: E1125 15:22:12.041086 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e\": container with ID starting with 3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e not found: ID does not exist" containerID="3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.041117 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e"} err="failed to get container status \"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e\": rpc error: code = NotFound desc = could not find container \"3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e\": container with ID starting with 3555233dd7434ce904ac3322c0b6cb6d6969e1072e20b0540c30699befb8cf5e not found: ID does not exist" Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.041180 4879 scope.go:117] "RemoveContainer" containerID="2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa" Nov 25 15:22:12 crc kubenswrapper[4879]: E1125 15:22:12.041546 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa\": container with ID starting 
with 2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa not found: ID does not exist" containerID="2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa"
Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.041573 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa"} err="failed to get container status \"2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa\": rpc error: code = NotFound desc = could not find container \"2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa\": container with ID starting with 2536a664f5b71a7b392ec6f8c1da5de3b836172cf24a1d5878f4cfef401f2aaa not found: ID does not exist"
Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.499757 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12f8d906-802f-4a83-a324-f8a8989472e2" (UID: "12f8d906-802f-4a83-a324-f8a8989472e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.517635 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12f8d906-802f-4a83-a324-f8a8989472e2-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.617186 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"]
Nov 25 15:22:12 crc kubenswrapper[4879]: I1125 15:22:12.638783 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fct7l"]
Nov 25 15:22:13 crc kubenswrapper[4879]: I1125 15:22:13.656100 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" path="/var/lib/kubelet/pods/12f8d906-802f-4a83-a324-f8a8989472e2/volumes"
Nov 25 15:22:47 crc kubenswrapper[4879]: I1125 15:22:47.409099 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:22:47 crc kubenswrapper[4879]: I1125 15:22:47.409868 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:23:17 crc kubenswrapper[4879]: I1125 15:23:17.409213 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:23:17 crc kubenswrapper[4879]: I1125 15:23:17.410155 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.586698 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:43 crc kubenswrapper[4879]: E1125 15:23:43.587545 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="registry-server"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.587558 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="registry-server"
Nov 25 15:23:43 crc kubenswrapper[4879]: E1125 15:23:43.587573 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="extract-utilities"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.587580 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="extract-utilities"
Nov 25 15:23:43 crc kubenswrapper[4879]: E1125 15:23:43.587599 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="extract-content"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.587608 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="extract-content"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.587792 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f8d906-802f-4a83-a324-f8a8989472e2" containerName="registry-server"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.588918 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.601604 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.698866 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.698951 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.699098 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2n48\" (UniqueName: \"kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.800881 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.800979 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2n48\" (UniqueName: \"kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.801403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.801456 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.801778 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.820105 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2n48\" (UniqueName: \"kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48\") pod \"redhat-marketplace-phpkh\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") " pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:43 crc kubenswrapper[4879]: I1125 15:23:43.918692 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:44 crc kubenswrapper[4879]: I1125 15:23:44.376322 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:44 crc kubenswrapper[4879]: I1125 15:23:44.733524 4879 generic.go:334] "Generic (PLEG): container finished" podID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerID="7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c" exitCode=0
Nov 25 15:23:44 crc kubenswrapper[4879]: I1125 15:23:44.733686 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerDied","Data":"7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c"}
Nov 25 15:23:44 crc kubenswrapper[4879]: I1125 15:23:44.733832 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerStarted","Data":"8efece549f0a44d8b12fe558ceef2cea346579ab746207ba5310f3d361cd000f"}
Nov 25 15:23:46 crc kubenswrapper[4879]: I1125 15:23:46.771329 4879 generic.go:334] "Generic (PLEG): container finished" podID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerID="8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d" exitCode=0
Nov 25 15:23:46 crc kubenswrapper[4879]: I1125 15:23:46.771412 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerDied","Data":"8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d"}
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.408747 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.408806 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.408856 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t"
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.409566 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.409692 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879" gracePeriod=600
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.779847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerStarted","Data":"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"}
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.783794 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879" exitCode=0
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.783834 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879"}
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.783863 4879 scope.go:117] "RemoveContainer" containerID="5ab193e8b77d8a37141ae2aa3e0ed6eb3795ce358b2e455ca2f839ba3cf59c3f"
Nov 25 15:23:47 crc kubenswrapper[4879]: I1125 15:23:47.798933 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-phpkh" podStartSLOduration=2.108627398 podStartE2EDuration="4.798910875s" podCreationTimestamp="2025-11-25 15:23:43 +0000 UTC" firstStartedPulling="2025-11-25 15:23:44.735885581 +0000 UTC m=+3516.339298652" lastFinishedPulling="2025-11-25 15:23:47.426169058 +0000 UTC m=+3519.029582129" observedRunningTime="2025-11-25 15:23:47.79320592 +0000 UTC m=+3519.396618991" watchObservedRunningTime="2025-11-25 15:23:47.798910875 +0000 UTC m=+3519.402323946"
Nov 25 15:23:48 crc kubenswrapper[4879]: I1125 15:23:48.795020 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"}
Nov 25 15:23:53 crc kubenswrapper[4879]: I1125 15:23:53.919615 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:53 crc kubenswrapper[4879]: I1125 15:23:53.920499 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:53 crc kubenswrapper[4879]: I1125 15:23:53.987946 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:54 crc kubenswrapper[4879]: I1125 15:23:54.891325 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:54 crc kubenswrapper[4879]: I1125 15:23:54.937031 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:56 crc kubenswrapper[4879]: I1125 15:23:56.871013 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-phpkh" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="registry-server" containerID="cri-o://b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd" gracePeriod=2
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.296463 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.402932 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2n48\" (UniqueName: \"kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48\") pod \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") "
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.403042 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content\") pod \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") "
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.403103 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities\") pod \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\" (UID: \"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191\") "
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.404186 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities" (OuterVolumeSpecName: "utilities") pod "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" (UID: "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.409926 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48" (OuterVolumeSpecName: "kube-api-access-p2n48") pod "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" (UID: "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191"). InnerVolumeSpecName "kube-api-access-p2n48". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.425233 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" (UID: "4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.504226 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.504262 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-utilities\") on node \"crc\" DevicePath \"\""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.504271 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2n48\" (UniqueName: \"kubernetes.io/projected/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191-kube-api-access-p2n48\") on node \"crc\" DevicePath \"\""
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.880705 4879 generic.go:334] "Generic (PLEG): container finished" podID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerID="b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd" exitCode=0
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.880752 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerDied","Data":"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"}
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.880782 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-phpkh" event={"ID":"4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191","Type":"ContainerDied","Data":"8efece549f0a44d8b12fe558ceef2cea346579ab746207ba5310f3d361cd000f"}
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.880804 4879 scope.go:117] "RemoveContainer" containerID="b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.880804 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-phpkh"
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.903456 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:57 crc kubenswrapper[4879]: I1125 15:23:57.910843 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-phpkh"]
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.052200 4879 scope.go:117] "RemoveContainer" containerID="8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.070313 4879 scope.go:117] "RemoveContainer" containerID="7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.102534 4879 scope.go:117] "RemoveContainer" containerID="b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"
Nov 25 15:23:58 crc kubenswrapper[4879]: E1125 15:23:58.103166 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd\": container with ID starting with b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd not found: ID does not exist" containerID="b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.103205 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd"} err="failed to get container status \"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd\": rpc error: code = NotFound desc = could not find container \"b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd\": container with ID starting with b248c05dcf0eb5c178a9793360e1c0f2ad659510bdeaae6a5babdcdfc5a0aadd not found: ID does not exist"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.103231 4879 scope.go:117] "RemoveContainer" containerID="8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d"
Nov 25 15:23:58 crc kubenswrapper[4879]: E1125 15:23:58.103496 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d\": container with ID starting with 8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d not found: ID does not exist" containerID="8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.103549 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d"} err="failed to get container status \"8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d\": rpc error: code = NotFound desc = could not find container \"8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d\": container with ID starting with 8d717ec177148e379909df38af923da469f5cf8d315939b98610be8407bfed3d not found: ID does not exist"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.103585 4879 scope.go:117] "RemoveContainer" containerID="7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c"
Nov 25 15:23:58 crc kubenswrapper[4879]: E1125 15:23:58.104099 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c\": container with ID starting with 7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c not found: ID does not exist" containerID="7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c"
Nov 25 15:23:58 crc kubenswrapper[4879]: I1125 15:23:58.104194 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c"} err="failed to get container status \"7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c\": rpc error: code = NotFound desc = could not find container \"7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c\": container with ID starting with 7b130b064925e923f7605e748dc7018f269eb4aaeba5a2bce8c7616ea59e390c not found: ID does not exist"
Nov 25 15:23:59 crc kubenswrapper[4879]: I1125 15:23:59.652381 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" path="/var/lib/kubelet/pods/4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191/volumes"
Nov 25 15:25:47 crc kubenswrapper[4879]: I1125 15:25:47.409488 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:25:47 crc kubenswrapper[4879]: I1125 15:25:47.411183 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:26:17 crc kubenswrapper[4879]: I1125 15:26:17.409236 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:26:17 crc kubenswrapper[4879]: I1125 15:26:17.409791 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.887464 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:42 crc kubenswrapper[4879]: E1125 15:26:42.888356 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="registry-server"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.888370 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="registry-server"
Nov 25 15:26:42 crc kubenswrapper[4879]: E1125 15:26:42.888396 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="extract-content"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.888403 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="extract-content"
Nov 25 15:26:42 crc kubenswrapper[4879]: E1125 15:26:42.888426 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="extract-utilities"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.888434 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="extract-utilities"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.888572 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b80b19e-b0c9-4553-b8d6-7b0fa9e7a191" containerName="registry-server"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.889609 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:42 crc kubenswrapper[4879]: I1125 15:26:42.903896 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.002265 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmf5p\" (UniqueName: \"kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.002603 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.002721 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.104365 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.104445 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.104493 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmf5p\" (UniqueName: \"kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.105038 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.105111 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.123851 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmf5p\" (UniqueName: \"kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p\") pod \"certified-operators-thqn6\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") " pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.209377 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:43 crc kubenswrapper[4879]: I1125 15:26:43.779861 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:44 crc kubenswrapper[4879]: I1125 15:26:44.178188 4879 generic.go:334] "Generic (PLEG): container finished" podID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerID="ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114" exitCode=0
Nov 25 15:26:44 crc kubenswrapper[4879]: I1125 15:26:44.178244 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerDied","Data":"ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114"}
Nov 25 15:26:44 crc kubenswrapper[4879]: I1125 15:26:44.178277 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerStarted","Data":"9c23d34654e3904767910a06d428073392d9a4299ad13329a3d21f3b446de943"}
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.203139 4879 generic.go:334] "Generic (PLEG): container finished" podID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerID="b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1" exitCode=0
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.203715 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerDied","Data":"b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1"}
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.409250 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.409307 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.409349 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t"
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.410028 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 25 15:26:47 crc kubenswrapper[4879]: I1125 15:26:47.410107 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" gracePeriod=600
Nov 25 15:26:47 crc kubenswrapper[4879]: E1125 15:26:47.583642 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.213473 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerStarted","Data":"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"}
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.216042 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" exitCode=0
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.216109 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"}
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.216284 4879 scope.go:117] "RemoveContainer" containerID="5b33f8a28bad6d5ae8e454d323ebc77517fd2a0380847605d4b69853e3f15879"
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.216843 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:26:48 crc kubenswrapper[4879]: E1125 15:26:48.217109 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:26:48 crc kubenswrapper[4879]: I1125 15:26:48.235939 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-thqn6" podStartSLOduration=2.668163422 podStartE2EDuration="6.235921398s" podCreationTimestamp="2025-11-25 15:26:42 +0000 UTC" firstStartedPulling="2025-11-25 15:26:44.179909355 +0000 UTC m=+3695.783322426" lastFinishedPulling="2025-11-25 15:26:47.747667331 +0000 UTC m=+3699.351080402" observedRunningTime="2025-11-25 15:26:48.230860621 +0000 UTC m=+3699.834273692" watchObservedRunningTime="2025-11-25 15:26:48.235921398 +0000 UTC m=+3699.839334469"
Nov 25 15:26:53 crc kubenswrapper[4879]: I1125 15:26:53.209924 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:53 crc kubenswrapper[4879]: I1125 15:26:53.210261 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:53 crc kubenswrapper[4879]: I1125 15:26:53.254601 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:53 crc kubenswrapper[4879]: I1125 15:26:53.305182 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:53 crc kubenswrapper[4879]: I1125 15:26:53.493803 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:55 crc kubenswrapper[4879]: I1125 15:26:55.276565 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-thqn6" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="registry-server" containerID="cri-o://36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3" gracePeriod=2
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.164344 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.284745 4879 generic.go:334] "Generic (PLEG): container finished" podID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerID="36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3" exitCode=0
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.284795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerDied","Data":"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"}
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.284836 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-thqn6" event={"ID":"3db4f3a9-df34-4167-9ac2-0e2b613f7870","Type":"ContainerDied","Data":"9c23d34654e3904767910a06d428073392d9a4299ad13329a3d21f3b446de943"}
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.284841 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-thqn6"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.284860 4879 scope.go:117] "RemoveContainer" containerID="36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.303579 4879 scope.go:117] "RemoveContainer" containerID="b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.309423 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content\") pod \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") "
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.309532 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmf5p\" (UniqueName: \"kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p\") pod \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") "
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.309618 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities\") pod \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\" (UID: \"3db4f3a9-df34-4167-9ac2-0e2b613f7870\") "
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.310789 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities" (OuterVolumeSpecName: "utilities") pod "3db4f3a9-df34-4167-9ac2-0e2b613f7870" (UID: "3db4f3a9-df34-4167-9ac2-0e2b613f7870"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.319559 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p" (OuterVolumeSpecName: "kube-api-access-zmf5p") pod "3db4f3a9-df34-4167-9ac2-0e2b613f7870" (UID: "3db4f3a9-df34-4167-9ac2-0e2b613f7870"). InnerVolumeSpecName "kube-api-access-zmf5p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.326384 4879 scope.go:117] "RemoveContainer" containerID="ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.362585 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3db4f3a9-df34-4167-9ac2-0e2b613f7870" (UID: "3db4f3a9-df34-4167-9ac2-0e2b613f7870"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.380008 4879 scope.go:117] "RemoveContainer" containerID="36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"
Nov 25 15:26:56 crc kubenswrapper[4879]: E1125 15:26:56.380618 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3\": container with ID starting with 36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3 not found: ID does not exist" containerID="36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.380681 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3"} err="failed to get container status \"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3\": rpc error: code = NotFound desc = could not find container \"36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3\": container with ID starting with 36c2fa523dbc93a812baa32ba23cf02baff405939b8f5825b52730dc5cdb6ad3 not found: ID does not exist"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.380713 4879 scope.go:117] "RemoveContainer" containerID="b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1"
Nov 25 15:26:56 crc kubenswrapper[4879]: E1125 15:26:56.381108 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1\": container with ID starting with b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1 not found: ID does not exist" containerID="b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.381164 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1"} err="failed to get container status \"b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1\": rpc error: code = NotFound desc = could not find container \"b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1\": container with ID starting with b1c0061b34e52ea6a93e230c1c8436dce26c7e43d9b2d0df81c1c8786140ace1 not found: ID does not exist"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.381195 4879 scope.go:117] "RemoveContainer" containerID="ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114"
Nov 25 15:26:56 crc kubenswrapper[4879]: E1125 15:26:56.381697 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114\": container with ID starting with ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114 not found: ID does not exist" containerID="ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.381749 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114"} err="failed to get container status \"ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114\": rpc error: code = NotFound desc = could not find container \"ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114\": container with ID starting with ceedcff7dfd2eefdd4c15bb510766c4734dbdb3692d5d300469a5139bcb2f114 not found: ID does not exist"
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.411068 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.411137 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmf5p\" (UniqueName: \"kubernetes.io/projected/3db4f3a9-df34-4167-9ac2-0e2b613f7870-kube-api-access-zmf5p\") on node \"crc\" DevicePath \"\""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.411152 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3db4f3a9-df34-4167-9ac2-0e2b613f7870-utilities\") on node \"crc\" DevicePath \"\""
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.615572 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:56 crc kubenswrapper[4879]: I1125 15:26:56.623137 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-thqn6"]
Nov 25 15:26:57 crc kubenswrapper[4879]: I1125 15:26:57.653862 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" path="/var/lib/kubelet/pods/3db4f3a9-df34-4167-9ac2-0e2b613f7870/volumes"
Nov 25 15:26:59 crc kubenswrapper[4879]: I1125 15:26:59.649882 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:26:59 crc kubenswrapper[4879]: E1125 15:26:59.650194 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.106351 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k8tjf"]
Nov 25 15:27:10 crc kubenswrapper[4879]: E1125 15:27:10.107861 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="extract-content"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.107881 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="extract-content"
Nov 25 15:27:10 crc kubenswrapper[4879]: E1125 15:27:10.107897 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="extract-utilities"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.107905 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="extract-utilities"
Nov 25 15:27:10 crc kubenswrapper[4879]: E1125 15:27:10.107934 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="registry-server"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.107942 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="registry-server"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.108155 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3db4f3a9-df34-4167-9ac2-0e2b613f7870" containerName="registry-server"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.109517 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.126011 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8tjf"]
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.219705 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zdw2\" (UniqueName: \"kubernetes.io/projected/6149d816-88ec-4de7-bdf4-ab7ddad33069-kube-api-access-5zdw2\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.219786 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-utilities\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.219829 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-catalog-content\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.320933 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zdw2\" (UniqueName: \"kubernetes.io/projected/6149d816-88ec-4de7-bdf4-ab7ddad33069-kube-api-access-5zdw2\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.321248 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-utilities\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.321403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-catalog-content\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.321880 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-utilities\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.321895 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6149d816-88ec-4de7-bdf4-ab7ddad33069-catalog-content\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.342946 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zdw2\" (UniqueName: \"kubernetes.io/projected/6149d816-88ec-4de7-bdf4-ab7ddad33069-kube-api-access-5zdw2\") pod \"community-operators-k8tjf\" (UID: \"6149d816-88ec-4de7-bdf4-ab7ddad33069\") " pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.439940 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:10 crc kubenswrapper[4879]: I1125 15:27:10.946674 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8tjf"]
Nov 25 15:27:11 crc kubenswrapper[4879]: I1125 15:27:11.405372 4879 generic.go:334] "Generic (PLEG): container finished" podID="6149d816-88ec-4de7-bdf4-ab7ddad33069" containerID="e3c06fde22304cf0313bb62cc0aad5e42685b582cf6738ab8d49d8b699ec74b8" exitCode=0
Nov 25 15:27:11 crc kubenswrapper[4879]: I1125 15:27:11.405412 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8tjf" event={"ID":"6149d816-88ec-4de7-bdf4-ab7ddad33069","Type":"ContainerDied","Data":"e3c06fde22304cf0313bb62cc0aad5e42685b582cf6738ab8d49d8b699ec74b8"}
Nov 25 15:27:11 crc kubenswrapper[4879]: I1125 15:27:11.405463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8tjf" event={"ID":"6149d816-88ec-4de7-bdf4-ab7ddad33069","Type":"ContainerStarted","Data":"20e788b021897f29319b3c46d898e0dd959ce95c8ef8c23dce9a4960eb5cbf8b"}
Nov 25 15:27:11 crc kubenswrapper[4879]: I1125 15:27:11.407565 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 25 15:27:13 crc kubenswrapper[4879]: I1125 15:27:13.645208 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:27:13 crc kubenswrapper[4879]: E1125 15:27:13.645746 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:27:16 crc kubenswrapper[4879]: I1125 15:27:16.441302 4879 generic.go:334] "Generic (PLEG): container finished" podID="6149d816-88ec-4de7-bdf4-ab7ddad33069" containerID="a9c8383ea87a9d67b71b9002d73fa4a2fcc6234191d73ad483ea1520dca886d2" exitCode=0
Nov 25 15:27:16 crc kubenswrapper[4879]: I1125 15:27:16.441409 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8tjf" event={"ID":"6149d816-88ec-4de7-bdf4-ab7ddad33069","Type":"ContainerDied","Data":"a9c8383ea87a9d67b71b9002d73fa4a2fcc6234191d73ad483ea1520dca886d2"}
Nov 25 15:27:17 crc kubenswrapper[4879]: I1125 15:27:17.460388 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8tjf" event={"ID":"6149d816-88ec-4de7-bdf4-ab7ddad33069","Type":"ContainerStarted","Data":"62870699e5fd15b4e52dd24f512b9ef5399fef0feb8d3ab09451bfdc64a1d1e2"}
Nov 25 15:27:17 crc kubenswrapper[4879]: I1125 15:27:17.477997 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k8tjf" podStartSLOduration=1.762736254 podStartE2EDuration="7.477953348s" podCreationTimestamp="2025-11-25 15:27:10 +0000 UTC" firstStartedPulling="2025-11-25 15:27:11.407027157 +0000 UTC m=+3723.010440228" lastFinishedPulling="2025-11-25 15:27:17.122244211 +0000 UTC m=+3728.725657322" observedRunningTime="2025-11-25 15:27:17.476748565 +0000 UTC m=+3729.080161676" watchObservedRunningTime="2025-11-25 15:27:17.477953348 +0000 UTC m=+3729.081366459"
Nov 25 15:27:20 crc kubenswrapper[4879]: I1125 15:27:20.440458 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:20 crc kubenswrapper[4879]: I1125 15:27:20.440807 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:20 crc kubenswrapper[4879]: I1125 15:27:20.486609 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:28 crc kubenswrapper[4879]: I1125 15:27:28.644860 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:27:28 crc kubenswrapper[4879]: E1125 15:27:28.645488 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:27:30 crc kubenswrapper[4879]: I1125 15:27:30.477728 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k8tjf"
Nov 25 15:27:30 crc kubenswrapper[4879]: I1125 15:27:30.539442 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8tjf"]
Nov 25 15:27:30 crc kubenswrapper[4879]: I1125 15:27:30.593630 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"]
Nov 25 15:27:30 crc kubenswrapper[4879]: I1125 15:27:30.594694 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8qnz8" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="registry-server" containerID="cri-o://4c4409af6b4a5b1d4273f0d040964206a0f47d41c78e92d9f87aad7859b0b3a6" gracePeriod=2
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.569278 4879 generic.go:334] "Generic (PLEG): container finished" podID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerID="4c4409af6b4a5b1d4273f0d040964206a0f47d41c78e92d9f87aad7859b0b3a6" exitCode=0
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.569459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerDied","Data":"4c4409af6b4a5b1d4273f0d040964206a0f47d41c78e92d9f87aad7859b0b3a6"}
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.570947 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8qnz8" event={"ID":"7e40db6b-1e76-4192-99bc-900d85e8e10f","Type":"ContainerDied","Data":"aacdfca232f823afa9b76bb36b530b2bebeee582fa066f3dca4bb808a845c373"}
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.570977 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aacdfca232f823afa9b76bb36b530b2bebeee582fa066f3dca4bb808a845c373"
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.571270 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8qnz8"
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.742420 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6p2t\" (UniqueName: \"kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t\") pod \"7e40db6b-1e76-4192-99bc-900d85e8e10f\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") "
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.742552 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities\") pod \"7e40db6b-1e76-4192-99bc-900d85e8e10f\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") "
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.742649 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content\") pod \"7e40db6b-1e76-4192-99bc-900d85e8e10f\" (UID: \"7e40db6b-1e76-4192-99bc-900d85e8e10f\") "
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.743711 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities" (OuterVolumeSpecName: "utilities") pod "7e40db6b-1e76-4192-99bc-900d85e8e10f" (UID: "7e40db6b-1e76-4192-99bc-900d85e8e10f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.751815 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t" (OuterVolumeSpecName: "kube-api-access-h6p2t") pod "7e40db6b-1e76-4192-99bc-900d85e8e10f" (UID: "7e40db6b-1e76-4192-99bc-900d85e8e10f"). InnerVolumeSpecName "kube-api-access-h6p2t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.800401 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e40db6b-1e76-4192-99bc-900d85e8e10f" (UID: "7e40db6b-1e76-4192-99bc-900d85e8e10f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.844405 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.844450 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6p2t\" (UniqueName: \"kubernetes.io/projected/7e40db6b-1e76-4192-99bc-900d85e8e10f-kube-api-access-h6p2t\") on node \"crc\" DevicePath \"\""
Nov 25 15:27:31 crc kubenswrapper[4879]: I1125 15:27:31.844465 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e40db6b-1e76-4192-99bc-900d85e8e10f-utilities\") on node \"crc\" DevicePath \"\""
Nov 25 15:27:32 crc kubenswrapper[4879]: I1125 15:27:32.578164 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8qnz8"
Nov 25 15:27:32 crc kubenswrapper[4879]: I1125 15:27:32.614301 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"]
Nov 25 15:27:32 crc kubenswrapper[4879]: I1125 15:27:32.620031 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8qnz8"]
Nov 25 15:27:33 crc kubenswrapper[4879]: I1125 15:27:33.657499 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" path="/var/lib/kubelet/pods/7e40db6b-1e76-4192-99bc-900d85e8e10f/volumes"
Nov 25 15:27:41 crc kubenswrapper[4879]: I1125 15:27:41.722449 4879 scope.go:117] "RemoveContainer" containerID="4c4409af6b4a5b1d4273f0d040964206a0f47d41c78e92d9f87aad7859b0b3a6"
Nov 25 15:27:41 crc kubenswrapper[4879]: I1125 15:27:41.747514 4879 scope.go:117] "RemoveContainer" containerID="da1c33d559705dd479f9aa597118455fe50ad4af7be9130c43b407c5bb284447"
Nov 25 15:27:41 crc kubenswrapper[4879]: I1125 15:27:41.765440 4879 scope.go:117] "RemoveContainer" containerID="75446bd566643b5ebb79edd05f837ef0cc22bbcc0cfcfd6b20b7441745dbd1ac"
Nov 25 15:27:43 crc kubenswrapper[4879]: I1125 15:27:43.646545 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:27:43 crc kubenswrapper[4879]: E1125 15:27:43.646803 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:27:56 crc kubenswrapper[4879]: I1125 15:27:56.644633 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:27:56 crc kubenswrapper[4879]: E1125 15:27:56.645666 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:28:08 crc kubenswrapper[4879]: I1125 15:28:08.645350 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:28:08 crc kubenswrapper[4879]: E1125 15:28:08.646243 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:28:21 crc kubenswrapper[4879]: I1125 15:28:21.645347 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:28:21 crc kubenswrapper[4879]: E1125 15:28:21.646152 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:28:32 crc kubenswrapper[4879]: I1125 15:28:32.645075 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:28:32 crc kubenswrapper[4879]: E1125 15:28:32.645723 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:28:43 crc kubenswrapper[4879]: I1125 15:28:43.644403 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:28:43 crc kubenswrapper[4879]: E1125 15:28:43.645113 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:28:56 crc kubenswrapper[4879]: I1125 15:28:56.645993 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:28:56 crc kubenswrapper[4879]: E1125 15:28:56.646968 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b"
Nov 25 15:29:11 crc kubenswrapper[4879]: I1125 15:29:11.644642 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a"
Nov 25 15:29:11 crc
kubenswrapper[4879]: E1125 15:29:11.645553 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:29:26 crc kubenswrapper[4879]: I1125 15:29:26.645747 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:29:26 crc kubenswrapper[4879]: E1125 15:29:26.646702 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:29:40 crc kubenswrapper[4879]: I1125 15:29:40.644689 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:29:40 crc kubenswrapper[4879]: E1125 15:29:40.645495 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:29:55 crc kubenswrapper[4879]: I1125 15:29:55.645348 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:29:55 crc kubenswrapper[4879]: E1125 15:29:55.647686 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.167770 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh"] Nov 25 15:30:00 crc kubenswrapper[4879]: E1125 15:30:00.168680 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="extract-content" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.168699 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="extract-content" Nov 25 15:30:00 crc kubenswrapper[4879]: E1125 15:30:00.168711 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="extract-utilities" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.168718 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="extract-utilities" Nov 25 15:30:00 crc kubenswrapper[4879]: E1125 15:30:00.168734 4879 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="registry-server" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.168746 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="registry-server" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.168965 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e40db6b-1e76-4192-99bc-900d85e8e10f" containerName="registry-server" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.169516 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.172294 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.177871 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.192355 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh"] Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.297376 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.297640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.297770 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l78vs\" (UniqueName: \"kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.399443 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.399507 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: 
I1125 15:30:00.399574 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l78vs\" (UniqueName: \"kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.400666 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.405265 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.415726 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l78vs\" (UniqueName: \"kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs\") pod \"collect-profiles-29401410-vxknh\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.491021 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:00 crc kubenswrapper[4879]: I1125 15:30:00.920228 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh"] Nov 25 15:30:01 crc kubenswrapper[4879]: I1125 15:30:01.727219 4879 generic.go:334] "Generic (PLEG): container finished" podID="f143b190-3df1-4505-aaf0-576be693a9ff" containerID="5bab8a4368329fc30e4d3e883a2e672eeed965fb7d4f5d4750e2170887ee4340" exitCode=0 Nov 25 15:30:01 crc kubenswrapper[4879]: I1125 15:30:01.727270 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" event={"ID":"f143b190-3df1-4505-aaf0-576be693a9ff","Type":"ContainerDied","Data":"5bab8a4368329fc30e4d3e883a2e672eeed965fb7d4f5d4750e2170887ee4340"} Nov 25 15:30:01 crc kubenswrapper[4879]: I1125 15:30:01.727301 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" event={"ID":"f143b190-3df1-4505-aaf0-576be693a9ff","Type":"ContainerStarted","Data":"bf32892f888f2f783a543b8fdd23922e4cd694ff34da69c4083b0f96b119eb86"} Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.028694 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.139702 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume\") pod \"f143b190-3df1-4505-aaf0-576be693a9ff\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.140102 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l78vs\" (UniqueName: \"kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs\") pod \"f143b190-3df1-4505-aaf0-576be693a9ff\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.140294 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume\") pod \"f143b190-3df1-4505-aaf0-576be693a9ff\" (UID: \"f143b190-3df1-4505-aaf0-576be693a9ff\") " Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.141582 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume" (OuterVolumeSpecName: "config-volume") pod "f143b190-3df1-4505-aaf0-576be693a9ff" (UID: "f143b190-3df1-4505-aaf0-576be693a9ff"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.146975 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f143b190-3df1-4505-aaf0-576be693a9ff" (UID: "f143b190-3df1-4505-aaf0-576be693a9ff"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.147113 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs" (OuterVolumeSpecName: "kube-api-access-l78vs") pod "f143b190-3df1-4505-aaf0-576be693a9ff" (UID: "f143b190-3df1-4505-aaf0-576be693a9ff"). InnerVolumeSpecName "kube-api-access-l78vs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.242247 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f143b190-3df1-4505-aaf0-576be693a9ff-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.242295 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f143b190-3df1-4505-aaf0-576be693a9ff-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.242316 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l78vs\" (UniqueName: \"kubernetes.io/projected/f143b190-3df1-4505-aaf0-576be693a9ff-kube-api-access-l78vs\") on node \"crc\" DevicePath \"\"" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.741815 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" event={"ID":"f143b190-3df1-4505-aaf0-576be693a9ff","Type":"ContainerDied","Data":"bf32892f888f2f783a543b8fdd23922e4cd694ff34da69c4083b0f96b119eb86"} Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.742189 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf32892f888f2f783a543b8fdd23922e4cd694ff34da69c4083b0f96b119eb86" Nov 25 15:30:03 crc kubenswrapper[4879]: I1125 15:30:03.741870 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh" Nov 25 15:30:04 crc kubenswrapper[4879]: I1125 15:30:04.118735 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc"] Nov 25 15:30:04 crc kubenswrapper[4879]: I1125 15:30:04.124661 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401365-q5zlc"] Nov 25 15:30:05 crc kubenswrapper[4879]: I1125 15:30:05.655345 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fef8236-994c-4660-ad35-11071a8ca4e5" path="/var/lib/kubelet/pods/3fef8236-994c-4660-ad35-11071a8ca4e5/volumes" Nov 25 15:30:08 crc kubenswrapper[4879]: I1125 15:30:08.645106 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:30:08 crc kubenswrapper[4879]: E1125 15:30:08.645696 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:30:21 crc kubenswrapper[4879]: I1125 15:30:21.644956 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:30:21 crc kubenswrapper[4879]: E1125 15:30:21.645626 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:30:34 crc kubenswrapper[4879]: I1125 15:30:34.645047 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:30:34 crc kubenswrapper[4879]: E1125 15:30:34.645998 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:30:41 crc kubenswrapper[4879]: I1125 15:30:41.854760 4879 scope.go:117] "RemoveContainer" containerID="82b8d8f559936292b38d5d41780f507fbe3341fb01dac53b713d44502ac5aa73" Nov 25 15:30:49 crc kubenswrapper[4879]: I1125 15:30:49.659973 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:30:49 crc kubenswrapper[4879]: E1125 15:30:49.660713 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:31:00 crc kubenswrapper[4879]: I1125 15:31:00.644699 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:31:00 crc kubenswrapper[4879]: E1125 15:31:00.645260 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:31:14 crc kubenswrapper[4879]: I1125 15:31:14.644994 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:31:14 crc kubenswrapper[4879]: E1125 15:31:14.645841 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:31:25 crc kubenswrapper[4879]: I1125 15:31:25.645079 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:31:25 crc kubenswrapper[4879]: E1125 15:31:25.647971 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:31:38 crc kubenswrapper[4879]: I1125 15:31:38.644956 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:31:38 crc kubenswrapper[4879]: E1125 15:31:38.645785 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:31:53 crc kubenswrapper[4879]: I1125 15:31:53.646878 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:31:54 crc kubenswrapper[4879]: I1125 15:31:54.593289 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633"} Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.220144 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:20 crc kubenswrapper[4879]: E1125 15:32:20.221242 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f143b190-3df1-4505-aaf0-576be693a9ff" containerName="collect-profiles" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.221271 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f143b190-3df1-4505-aaf0-576be693a9ff" containerName="collect-profiles" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.221601 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f143b190-3df1-4505-aaf0-576be693a9ff" containerName="collect-profiles" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.224622 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.232878 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.413341 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4qfx\" (UniqueName: \"kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.413462 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.413490 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.514791 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4qfx\" (UniqueName: \"kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.515265 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.515404 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.515846 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.515901 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.540911 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h4qfx\" (UniqueName: \"kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx\") pod \"redhat-operators-6rn56\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:20 crc kubenswrapper[4879]: I1125 15:32:20.556444 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:21 crc kubenswrapper[4879]: I1125 15:32:21.046624 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:21 crc kubenswrapper[4879]: I1125 15:32:21.803385 4879 generic.go:334] "Generic (PLEG): container finished" podID="4e641dcf-d795-4baa-862a-b571f217f329" containerID="d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0" exitCode=0 Nov 25 15:32:21 crc kubenswrapper[4879]: I1125 15:32:21.803686 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerDied","Data":"d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0"} Nov 25 15:32:21 crc kubenswrapper[4879]: I1125 15:32:21.803720 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerStarted","Data":"524dda63eac38ac160fe1f6a32af656b94a50b83836d2fe9ed460d79d0c063d9"} Nov 25 15:32:21 crc kubenswrapper[4879]: I1125 15:32:21.806794 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:32:22 crc kubenswrapper[4879]: I1125 15:32:22.815328 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerStarted","Data":"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838"} Nov 25 15:32:23 crc kubenswrapper[4879]: I1125 15:32:23.825254 4879 generic.go:334] "Generic (PLEG): container finished" podID="4e641dcf-d795-4baa-862a-b571f217f329" containerID="cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838" exitCode=0 Nov 25 15:32:23 crc kubenswrapper[4879]: I1125 15:32:23.825305 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerDied","Data":"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838"} Nov 25 15:32:24 crc kubenswrapper[4879]: I1125 15:32:24.834520 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerStarted","Data":"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f"} Nov 25 15:32:24 crc kubenswrapper[4879]: I1125 15:32:24.854754 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6rn56" podStartSLOduration=2.44238435 podStartE2EDuration="4.854733143s" podCreationTimestamp="2025-11-25 15:32:20 +0000 UTC" firstStartedPulling="2025-11-25 15:32:21.806547974 +0000 UTC m=+4033.409961045" lastFinishedPulling="2025-11-25 15:32:24.218896767 +0000 UTC m=+4035.822309838" observedRunningTime="2025-11-25 15:32:24.850222822 +0000 UTC m=+4036.453635893" watchObservedRunningTime="2025-11-25 15:32:24.854733143 +0000 UTC m=+4036.458146214" Nov 25 15:32:30 crc 
kubenswrapper[4879]: I1125 15:32:30.557551 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:30 crc kubenswrapper[4879]: I1125 15:32:30.557939 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:30 crc kubenswrapper[4879]: I1125 15:32:30.598744 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:30 crc kubenswrapper[4879]: I1125 15:32:30.929295 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:30 crc kubenswrapper[4879]: I1125 15:32:30.976861 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:32 crc kubenswrapper[4879]: I1125 15:32:32.902894 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6rn56" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="registry-server" containerID="cri-o://02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f" gracePeriod=2 Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.526440 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.701474 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content\") pod \"4e641dcf-d795-4baa-862a-b571f217f329\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.701551 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4qfx\" (UniqueName: \"kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx\") pod \"4e641dcf-d795-4baa-862a-b571f217f329\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.701587 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities\") pod \"4e641dcf-d795-4baa-862a-b571f217f329\" (UID: \"4e641dcf-d795-4baa-862a-b571f217f329\") " Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.702713 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities" (OuterVolumeSpecName: "utilities") pod "4e641dcf-d795-4baa-862a-b571f217f329" (UID: "4e641dcf-d795-4baa-862a-b571f217f329"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.743110 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx" (OuterVolumeSpecName: "kube-api-access-h4qfx") pod "4e641dcf-d795-4baa-862a-b571f217f329" (UID: "4e641dcf-d795-4baa-862a-b571f217f329"). InnerVolumeSpecName "kube-api-access-h4qfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.803548 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4qfx\" (UniqueName: \"kubernetes.io/projected/4e641dcf-d795-4baa-862a-b571f217f329-kube-api-access-h4qfx\") on node \"crc\" DevicePath \"\"" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.803580 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.913749 4879 generic.go:334] "Generic (PLEG): container finished" podID="4e641dcf-d795-4baa-862a-b571f217f329" containerID="02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f" exitCode=0 Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.913817 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerDied","Data":"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f"} Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.913871 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6rn56" event={"ID":"4e641dcf-d795-4baa-862a-b571f217f329","Type":"ContainerDied","Data":"524dda63eac38ac160fe1f6a32af656b94a50b83836d2fe9ed460d79d0c063d9"} Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.913890 4879 scope.go:117] "RemoveContainer" containerID="02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.914327 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6rn56" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.942621 4879 scope.go:117] "RemoveContainer" containerID="cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.976822 4879 scope.go:117] "RemoveContainer" containerID="d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.992996 4879 scope.go:117] "RemoveContainer" containerID="02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f" Nov 25 15:32:33 crc kubenswrapper[4879]: E1125 15:32:33.993712 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f\": container with ID starting with 02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f not found: ID does not exist" containerID="02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.993749 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f"} err="failed to get container status \"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f\": rpc error: code = NotFound desc = could not find container \"02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f\": container with ID starting with 02653082fa5e6ca6510eb287aa00906a8c2e8e395dbe67c5b90856a5beea3a1f not found: ID does not exist" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.993781 4879 scope.go:117] "RemoveContainer" containerID="cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838" Nov 25 15:32:33 crc kubenswrapper[4879]: E1125 15:32:33.994143 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838\": container with ID starting with cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838 not found: ID does not exist" containerID="cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.994168 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838"} err="failed to get container status \"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838\": rpc error: code = NotFound desc = could not find container \"cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838\": container with ID starting with cbbedf6c7ae52e4414d746d1506db3abcddfbda4daef4591e551ff86d8fee838 not found: ID does not exist" Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.994186 4879 scope.go:117] "RemoveContainer" containerID="d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0" Nov 25 15:32:33 crc kubenswrapper[4879]: E1125 15:32:33.994527 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0\": container with ID starting with d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0 not found: ID does not exist" containerID="d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0" 
Nov 25 15:32:33 crc kubenswrapper[4879]: I1125 15:32:33.994558 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0"} err="failed to get container status \"d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0\": rpc error: code = NotFound desc = could not find container \"d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0\": container with ID starting with d44296bce35faa9f08545ac3a0c83d440f23c73b3ba26e6698dfc2ae478581c0 not found: ID does not exist" Nov 25 15:32:35 crc kubenswrapper[4879]: I1125 15:32:35.628738 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e641dcf-d795-4baa-862a-b571f217f329" (UID: "4e641dcf-d795-4baa-862a-b571f217f329"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:32:35 crc kubenswrapper[4879]: I1125 15:32:35.730461 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e641dcf-d795-4baa-862a-b571f217f329-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:32:35 crc kubenswrapper[4879]: I1125 15:32:35.737654 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:35 crc kubenswrapper[4879]: I1125 15:32:35.744160 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6rn56"] Nov 25 15:32:37 crc kubenswrapper[4879]: I1125 15:32:37.672529 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e641dcf-d795-4baa-862a-b571f217f329" path="/var/lib/kubelet/pods/4e641dcf-d795-4baa-862a-b571f217f329/volumes" Nov 25 15:34:17 crc kubenswrapper[4879]: I1125 15:34:17.409256 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:34:17 crc kubenswrapper[4879]: I1125 15:34:17.409999 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.617482 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:44 crc kubenswrapper[4879]: E1125 15:34:44.618292 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="extract-content" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.618305 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="extract-content" Nov 25 15:34:44 crc kubenswrapper[4879]: E1125 15:34:44.618321 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="registry-server" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.618326 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="registry-server" Nov 25 15:34:44 crc kubenswrapper[4879]: E1125 15:34:44.618338 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="extract-utilities" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.618346 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="extract-utilities" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.618494 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e641dcf-d795-4baa-862a-b571f217f329" containerName="registry-server" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.619470 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.629921 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.722604 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.722774 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66jm\" (UniqueName: \"kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.722823 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.824199 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.824304 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.824357 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66jm\" (UniqueName: \"kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.824874 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.825176 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.842707 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z66jm\" (UniqueName: \"kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm\") pod \"redhat-marketplace-mkptl\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:44 crc kubenswrapper[4879]: I1125 15:34:44.941151 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:45 crc kubenswrapper[4879]: I1125 15:34:45.346543 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:45 crc kubenswrapper[4879]: I1125 15:34:45.920095 4879 generic.go:334] "Generic (PLEG): container finished" podID="97380959-ef47-4019-b20b-3b758ff9d047" containerID="c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d" exitCode=0 Nov 25 15:34:45 crc kubenswrapper[4879]: I1125 15:34:45.920203 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerDied","Data":"c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d"} Nov 25 15:34:45 crc kubenswrapper[4879]: I1125 15:34:45.920492 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerStarted","Data":"eb0f21567da34913c023584ec4fcf52ed84c6433df1e5c25ff0e7591bbaa762d"} Nov 25 15:34:46 crc kubenswrapper[4879]: I1125 15:34:46.928833 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerStarted","Data":"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf"} Nov 25 15:34:47 crc kubenswrapper[4879]: I1125 15:34:47.409025 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:34:47 crc kubenswrapper[4879]: I1125 15:34:47.409083 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:34:47 crc kubenswrapper[4879]: I1125 15:34:47.939477 4879 generic.go:334] "Generic (PLEG): container finished" 
podID="97380959-ef47-4019-b20b-3b758ff9d047" containerID="9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf" exitCode=0 Nov 25 15:34:47 crc kubenswrapper[4879]: I1125 15:34:47.939596 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerDied","Data":"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf"} Nov 25 15:34:48 crc kubenswrapper[4879]: I1125 15:34:48.949694 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerStarted","Data":"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e"} Nov 25 15:34:48 crc kubenswrapper[4879]: I1125 15:34:48.971554 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mkptl" podStartSLOduration=2.423057538 podStartE2EDuration="4.97153074s" podCreationTimestamp="2025-11-25 15:34:44 +0000 UTC" firstStartedPulling="2025-11-25 15:34:45.928799536 +0000 UTC m=+4177.532212607" lastFinishedPulling="2025-11-25 15:34:48.477272748 +0000 UTC m=+4180.080685809" observedRunningTime="2025-11-25 15:34:48.964761199 +0000 UTC m=+4180.568174270" watchObservedRunningTime="2025-11-25 15:34:48.97153074 +0000 UTC m=+4180.574943811" Nov 25 15:34:54 crc kubenswrapper[4879]: I1125 15:34:54.942152 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:54 crc kubenswrapper[4879]: I1125 15:34:54.942698 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:55 crc kubenswrapper[4879]: I1125 15:34:55.290629 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:55 crc kubenswrapper[4879]: I1125 15:34:55.332824 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:55 crc kubenswrapper[4879]: I1125 15:34:55.526661 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.001419 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mkptl" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="registry-server" containerID="cri-o://58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e" gracePeriod=2 Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.377158 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.502772 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z66jm\" (UniqueName: \"kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm\") pod \"97380959-ef47-4019-b20b-3b758ff9d047\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.502923 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities\") pod \"97380959-ef47-4019-b20b-3b758ff9d047\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.502947 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content\") pod \"97380959-ef47-4019-b20b-3b758ff9d047\" (UID: \"97380959-ef47-4019-b20b-3b758ff9d047\") " Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.504110 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities" (OuterVolumeSpecName: "utilities") pod "97380959-ef47-4019-b20b-3b758ff9d047" (UID: "97380959-ef47-4019-b20b-3b758ff9d047"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.508600 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm" (OuterVolumeSpecName: "kube-api-access-z66jm") pod "97380959-ef47-4019-b20b-3b758ff9d047" (UID: "97380959-ef47-4019-b20b-3b758ff9d047"). InnerVolumeSpecName "kube-api-access-z66jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.526096 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97380959-ef47-4019-b20b-3b758ff9d047" (UID: "97380959-ef47-4019-b20b-3b758ff9d047"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.604467 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.604735 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97380959-ef47-4019-b20b-3b758ff9d047-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:34:57 crc kubenswrapper[4879]: I1125 15:34:57.604751 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z66jm\" (UniqueName: \"kubernetes.io/projected/97380959-ef47-4019-b20b-3b758ff9d047-kube-api-access-z66jm\") on node \"crc\" DevicePath \"\"" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.013314 4879 generic.go:334] "Generic (PLEG): container finished" podID="97380959-ef47-4019-b20b-3b758ff9d047" containerID="58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e" exitCode=0 Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.013441 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mkptl" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.013403 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerDied","Data":"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e"} Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.013637 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mkptl" event={"ID":"97380959-ef47-4019-b20b-3b758ff9d047","Type":"ContainerDied","Data":"eb0f21567da34913c023584ec4fcf52ed84c6433df1e5c25ff0e7591bbaa762d"} Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.013696 4879 scope.go:117] "RemoveContainer" containerID="58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.045146 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.047512 4879 scope.go:117] "RemoveContainer" containerID="9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.053003 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mkptl"] Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.073298 4879 scope.go:117] "RemoveContainer" containerID="c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.103922 4879 scope.go:117] "RemoveContainer" containerID="58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e" Nov 25 15:34:58 crc kubenswrapper[4879]: E1125 15:34:58.104568 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e\": container with ID starting with 58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e not found: ID does not exist" containerID="58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.104622 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e"} err="failed to get container status \"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e\": rpc error: code = NotFound desc = could not find container \"58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e\": container with ID starting with 58bbeecc68716cff1ce6577edc330fee99b4bd1b1298c8dcad01a4188fb00b9e not found: ID does not exist" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.104661 4879 scope.go:117] "RemoveContainer" containerID="9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf" Nov 25 15:34:58 crc kubenswrapper[4879]: E1125 15:34:58.105299 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf\": container with ID starting with 9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf not found: ID does not exist" containerID="9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.105334 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf"} err="failed to get container status \"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf\": rpc error: code = NotFound desc = could not find container \"9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf\": container with ID starting with 9f9a30687e081c8dbeb9092753b85fccb22a26816e55e2c72baf0265a76b2baf not found: ID does not exist" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.105356 4879 scope.go:117] "RemoveContainer" containerID="c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d" Nov 25 15:34:58 crc kubenswrapper[4879]: E1125 15:34:58.105754 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d\": container with ID starting with c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d not found: ID does not exist" containerID="c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d" Nov 25 15:34:58 crc kubenswrapper[4879]: I1125 15:34:58.105788 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d"} err="failed to get container status \"c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d\": rpc error: code = NotFound desc = could not find container \"c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d\": container with ID starting with c281cef491f092f78cb683bc7a58b3a7eddc2de736618047bab650c66db9368d not found: ID does not exist" Nov 25 15:34:59 crc kubenswrapper[4879]: I1125 15:34:59.653736 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97380959-ef47-4019-b20b-3b758ff9d047" path="/var/lib/kubelet/pods/97380959-ef47-4019-b20b-3b758ff9d047/volumes" Nov 25 15:35:17 crc kubenswrapper[4879]: I1125 15:35:17.409246 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:35:17 crc kubenswrapper[4879]: I1125 15:35:17.409780 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:35:17 crc kubenswrapper[4879]: I1125 15:35:17.409831 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:35:17 crc kubenswrapper[4879]: I1125 15:35:17.410393 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:35:17 crc kubenswrapper[4879]: I1125 15:35:17.410443 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633" gracePeriod=600 Nov 25 15:35:18 crc kubenswrapper[4879]: I1125 15:35:18.153272 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633" exitCode=0 Nov 25 15:35:18 crc kubenswrapper[4879]: I1125 15:35:18.153337 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633"} Nov 25 15:35:18 crc kubenswrapper[4879]: I1125 15:35:18.153567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7"} Nov 25 15:35:18 crc kubenswrapper[4879]: I1125 15:35:18.153589 4879 scope.go:117] "RemoveContainer" containerID="41d8391551655c7ecb95ca660ef268637b1fb72605d7c4d17fbd35f13af9216a" Nov 25 15:37:17 crc kubenswrapper[4879]: I1125 15:37:17.409482 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:37:17 crc kubenswrapper[4879]: I1125 15:37:17.410388 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:37:47 crc kubenswrapper[4879]: I1125 15:37:47.409170 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:37:47 crc kubenswrapper[4879]: I1125 15:37:47.410308 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.320930 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:03 crc kubenswrapper[4879]: E1125 15:38:03.321847 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="extract-content" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.321863 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="extract-content" Nov 25 15:38:03 crc kubenswrapper[4879]: E1125 15:38:03.321896 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="registry-server" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.321903 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="registry-server" Nov 25 15:38:03 crc kubenswrapper[4879]: E1125 15:38:03.321917 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="extract-utilities" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.321925 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="extract-utilities" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.322082 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="97380959-ef47-4019-b20b-3b758ff9d047" containerName="registry-server" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.323369 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.341165 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.457897 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.458305 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.458533 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9snwf\" (UniqueName: \"kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.559990 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.560042 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.560115 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9snwf\" (UniqueName: \"kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.560862 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.560930 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.579820 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9snwf\" (UniqueName: \"kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf\") pod \"certified-operators-zwqmc\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:03 crc kubenswrapper[4879]: I1125 15:38:03.643615 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:04 crc kubenswrapper[4879]: I1125 15:38:04.117382 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:04 crc kubenswrapper[4879]: I1125 15:38:04.454762 4879 generic.go:334] "Generic (PLEG): container finished" podID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerID="ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432" exitCode=0 Nov 25 15:38:04 crc kubenswrapper[4879]: I1125 15:38:04.454828 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerDied","Data":"ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432"} Nov 25 15:38:04 crc kubenswrapper[4879]: I1125 15:38:04.455088 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerStarted","Data":"50d15b207606202ee9502845feebd7bcfc69b2d4dd2a0083ae3f92c3b4c79d27"} Nov 25 15:38:04 crc kubenswrapper[4879]: I1125 15:38:04.456851 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:38:05 crc kubenswrapper[4879]: I1125 15:38:05.465209 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerStarted","Data":"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817"} Nov 25 15:38:06 crc kubenswrapper[4879]: I1125 15:38:06.473646 4879 generic.go:334] "Generic (PLEG): container finished" podID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerID="20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817" exitCode=0 Nov 25 15:38:06 crc kubenswrapper[4879]: I1125 15:38:06.473742 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerDied","Data":"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817"} Nov 25 15:38:07 crc kubenswrapper[4879]: I1125 15:38:07.482488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerStarted","Data":"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42"} Nov 25 15:38:07 crc kubenswrapper[4879]: I1125 15:38:07.502044 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zwqmc" podStartSLOduration=1.976650606 podStartE2EDuration="4.50202403s" podCreationTimestamp="2025-11-25 15:38:03 +0000 UTC" firstStartedPulling="2025-11-25 15:38:04.456575224 +0000 UTC m=+4376.059988295" lastFinishedPulling="2025-11-25 15:38:06.981948648 +0000 UTC m=+4378.585361719" observedRunningTime="2025-11-25 15:38:07.497667924 +0000 UTC m=+4379.101081015" watchObservedRunningTime="2025-11-25 
15:38:07.50202403 +0000 UTC m=+4379.105437101" Nov 25 15:38:13 crc kubenswrapper[4879]: I1125 15:38:13.652613 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:13 crc kubenswrapper[4879]: I1125 15:38:13.653190 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:13 crc kubenswrapper[4879]: I1125 15:38:13.686968 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:14 crc kubenswrapper[4879]: I1125 15:38:14.578330 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:14 crc kubenswrapper[4879]: I1125 15:38:14.628768 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:16 crc kubenswrapper[4879]: I1125 15:38:16.626100 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zwqmc" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="registry-server" containerID="cri-o://28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42" gracePeriod=2 Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.046447 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.235858 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities\") pod \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.235992 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content\") pod \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.236025 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9snwf\" (UniqueName: \"kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf\") pod \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\" (UID: \"85f94cc2-1434-453b-ab7a-de46dfc2a6bd\") " Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.237074 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities" (OuterVolumeSpecName: "utilities") pod "85f94cc2-1434-453b-ab7a-de46dfc2a6bd" (UID: "85f94cc2-1434-453b-ab7a-de46dfc2a6bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.241398 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf" (OuterVolumeSpecName: "kube-api-access-9snwf") pod "85f94cc2-1434-453b-ab7a-de46dfc2a6bd" (UID: "85f94cc2-1434-453b-ab7a-de46dfc2a6bd"). InnerVolumeSpecName "kube-api-access-9snwf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.283264 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85f94cc2-1434-453b-ab7a-de46dfc2a6bd" (UID: "85f94cc2-1434-453b-ab7a-de46dfc2a6bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.337840 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.337885 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.337899 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9snwf\" (UniqueName: \"kubernetes.io/projected/85f94cc2-1434-453b-ab7a-de46dfc2a6bd-kube-api-access-9snwf\") on node \"crc\" DevicePath \"\"" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.409043 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.409111 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.409190 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.410234 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.410302 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" gracePeriod=600 Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.638389 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" exitCode=0 Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.638502 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7"} Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.638556 4879 scope.go:117] "RemoveContainer" containerID="7e942388aa2c69627a849f7912af16369f4f00ddc115880fdc841923530f1633" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.642698 4879 generic.go:334] "Generic (PLEG): container finished" podID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerID="28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42" exitCode=0 Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.642726 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerDied","Data":"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42"} Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.642746 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zwqmc" event={"ID":"85f94cc2-1434-453b-ab7a-de46dfc2a6bd","Type":"ContainerDied","Data":"50d15b207606202ee9502845feebd7bcfc69b2d4dd2a0083ae3f92c3b4c79d27"} Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.642824 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zwqmc" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.698668 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.709070 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zwqmc"] Nov 25 15:38:17 crc kubenswrapper[4879]: E1125 15:38:17.857793 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.881567 4879 scope.go:117] "RemoveContainer" containerID="28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.901370 4879 scope.go:117] "RemoveContainer" containerID="20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.926181 4879 scope.go:117] "RemoveContainer" containerID="ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.951849 4879 scope.go:117] "RemoveContainer" containerID="28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42" Nov 25 15:38:17 crc kubenswrapper[4879]: E1125 15:38:17.952538 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42\": container with ID starting with 28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42 not found: ID does not exist" containerID="28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.952605 4879 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42"} err="failed to get container status \"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42\": rpc error: code = NotFound desc = could not find container \"28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42\": container with ID starting with 28ba2c9a712b6163e3aeff763230eb46237ee8cc260f1476b1ebe73d805f3e42 not found: ID does not exist" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.952693 4879 scope.go:117] "RemoveContainer" containerID="20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817" Nov 25 15:38:17 crc kubenswrapper[4879]: E1125 15:38:17.953079 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817\": container with ID starting with 20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817 not found: ID does not exist" containerID="20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.953102 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817"} err="failed to get container status \"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817\": rpc error: code = NotFound desc = could not find container \"20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817\": container with ID starting with 20641bdc94710ff48261cd4aee85f186e89ea62d1e2a39c180e48f7c44210817 not found: ID does not exist" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.953117 4879 scope.go:117] "RemoveContainer" containerID="ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432" Nov 25 15:38:17 crc kubenswrapper[4879]: E1125 15:38:17.953389 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432\": container with ID starting with ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432 not found: ID does not exist" containerID="ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432" Nov 25 15:38:17 crc kubenswrapper[4879]: I1125 15:38:17.953412 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432"} err="failed to get container status \"ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432\": rpc error: code = NotFound desc = could not find container \"ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432\": container with ID starting with ec5c0a50b5a6a9cce98bf1c8807056f4a92271031cf16590bae50a1aa0b3d432 not found: ID does not exist" Nov 25 15:38:18 crc kubenswrapper[4879]: I1125 15:38:18.653863 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:38:18 crc kubenswrapper[4879]: E1125 15:38:18.654375 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:38:19 crc kubenswrapper[4879]: I1125 15:38:19.654894 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" path="/var/lib/kubelet/pods/85f94cc2-1434-453b-ab7a-de46dfc2a6bd/volumes" Nov 25 15:38:33 crc kubenswrapper[4879]: I1125 15:38:33.644649 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:38:33 crc kubenswrapper[4879]: E1125 15:38:33.645412 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:38:45 crc kubenswrapper[4879]: I1125 15:38:45.645214 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:38:45 crc kubenswrapper[4879]: E1125 15:38:45.652458 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:38:57 crc kubenswrapper[4879]: I1125 15:38:57.644985 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:38:57 crc kubenswrapper[4879]: E1125 15:38:57.645980 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:39:12 crc kubenswrapper[4879]: I1125 15:39:12.644773 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:39:12 crc kubenswrapper[4879]: E1125 15:39:12.645446 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:39:26 crc kubenswrapper[4879]: I1125 15:39:26.644434 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:39:26 crc kubenswrapper[4879]: E1125 15:39:26.645274 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:39:38 crc kubenswrapper[4879]: I1125 15:39:38.644509 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:39:38 crc kubenswrapper[4879]: E1125 15:39:38.645231 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:39:53 crc kubenswrapper[4879]: I1125 15:39:53.645427 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:39:53 crc kubenswrapper[4879]: E1125 15:39:53.646115 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:07 crc kubenswrapper[4879]: I1125 15:40:07.644291 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:40:07 crc kubenswrapper[4879]: E1125 15:40:07.645029 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:18 crc kubenswrapper[4879]: I1125 15:40:18.645325 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:40:18 crc kubenswrapper[4879]: E1125 15:40:18.646695 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:31 crc kubenswrapper[4879]: I1125 15:40:31.645103 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:40:31 crc kubenswrapper[4879]: E1125 15:40:31.645880 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:45 crc kubenswrapper[4879]: I1125 15:40:45.644830 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:40:45 crc kubenswrapper[4879]: E1125 15:40:45.645410 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.061345 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:40:59 crc kubenswrapper[4879]: E1125 15:40:59.062223 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="extract-utilities" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.062237 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="extract-utilities" Nov 25 15:40:59 crc kubenswrapper[4879]: E1125 15:40:59.062248 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="extract-content" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.062254 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="extract-content" Nov 25 15:40:59 crc kubenswrapper[4879]: E1125 15:40:59.062288 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="registry-server" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.062295 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="registry-server" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.062434 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="85f94cc2-1434-453b-ab7a-de46dfc2a6bd" containerName="registry-server" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.063549 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.071188 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.244205 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkdjn\" (UniqueName: \"kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.244266 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.244299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.346092 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.346212 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkdjn\" (UniqueName: \"kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.346256 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.346659 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.346672 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.368858 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rkdjn\" (UniqueName: \"kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn\") pod \"community-operators-tp6zk\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.384165 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.653617 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:40:59 crc kubenswrapper[4879]: E1125 15:40:59.654798 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:40:59 crc kubenswrapper[4879]: I1125 15:40:59.900758 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:41:00 crc kubenswrapper[4879]: I1125 15:41:00.858208 4879 generic.go:334] "Generic (PLEG): container finished" podID="b527622a-7efc-4fc1-a208-866962fcc96e" containerID="952883298f3154f086dd5eb186d8fa3ef75091acfd03658612d85603b0f2ea6e" exitCode=0 Nov 25 15:41:00 crc kubenswrapper[4879]: I1125 15:41:00.858561 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerDied","Data":"952883298f3154f086dd5eb186d8fa3ef75091acfd03658612d85603b0f2ea6e"} Nov 25 15:41:00 crc kubenswrapper[4879]: I1125 15:41:00.858586 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerStarted","Data":"1ace2b66f0a9fa6e11d331c9e47aa1b6b68866a676b7c0a566423cb76c6f29e7"} Nov 25 15:41:02 crc kubenswrapper[4879]: I1125 15:41:02.882885 4879 generic.go:334] "Generic (PLEG): container finished" podID="b527622a-7efc-4fc1-a208-866962fcc96e" containerID="981343c15cbf52b6ec88a8026883cbc26611f36f7e60b12772394eb2270658c8" exitCode=0 Nov 25 15:41:02 crc kubenswrapper[4879]: I1125 15:41:02.883136 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerDied","Data":"981343c15cbf52b6ec88a8026883cbc26611f36f7e60b12772394eb2270658c8"} Nov 25 15:41:03 crc kubenswrapper[4879]: I1125 15:41:03.891701 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerStarted","Data":"aff64b44770c566294543d35d6f5c14e05c6d177102312d06c697099fb78f9e6"} Nov 25 15:41:03 crc kubenswrapper[4879]: I1125 15:41:03.913204 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tp6zk" podStartSLOduration=2.423753335 podStartE2EDuration="4.91317947s" podCreationTimestamp="2025-11-25 15:40:59 +0000 UTC" firstStartedPulling="2025-11-25 15:41:00.859710871 +0000 UTC m=+4552.463123942" 
lastFinishedPulling="2025-11-25 15:41:03.349137006 +0000 UTC m=+4554.952550077" observedRunningTime="2025-11-25 15:41:03.90755269 +0000 UTC m=+4555.510965771" watchObservedRunningTime="2025-11-25 15:41:03.91317947 +0000 UTC m=+4555.516592561" Nov 25 15:41:09 crc kubenswrapper[4879]: I1125 15:41:09.385047 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:09 crc kubenswrapper[4879]: I1125 15:41:09.386424 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:09 crc kubenswrapper[4879]: I1125 15:41:09.434215 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:09 crc kubenswrapper[4879]: I1125 15:41:09.974935 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:11 crc kubenswrapper[4879]: I1125 15:41:11.047746 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:41:12 crc kubenswrapper[4879]: I1125 15:41:12.949691 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tp6zk" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="registry-server" containerID="cri-o://aff64b44770c566294543d35d6f5c14e05c6d177102312d06c697099fb78f9e6" gracePeriod=2 Nov 25 15:41:13 crc kubenswrapper[4879]: I1125 15:41:13.645096 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:41:13 crc kubenswrapper[4879]: E1125 15:41:13.645331 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:41:13 crc kubenswrapper[4879]: I1125 15:41:13.963667 4879 generic.go:334] "Generic (PLEG): container finished" podID="b527622a-7efc-4fc1-a208-866962fcc96e" containerID="aff64b44770c566294543d35d6f5c14e05c6d177102312d06c697099fb78f9e6" exitCode=0 Nov 25 15:41:13 crc kubenswrapper[4879]: I1125 15:41:13.963800 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerDied","Data":"aff64b44770c566294543d35d6f5c14e05c6d177102312d06c697099fb78f9e6"} Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.575373 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.669989 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities\") pod \"b527622a-7efc-4fc1-a208-866962fcc96e\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.670054 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkdjn\" (UniqueName: \"kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn\") pod \"b527622a-7efc-4fc1-a208-866962fcc96e\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.670113 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content\") pod \"b527622a-7efc-4fc1-a208-866962fcc96e\" (UID: \"b527622a-7efc-4fc1-a208-866962fcc96e\") " Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.671245 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities" (OuterVolumeSpecName: "utilities") pod "b527622a-7efc-4fc1-a208-866962fcc96e" (UID: "b527622a-7efc-4fc1-a208-866962fcc96e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.745390 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b527622a-7efc-4fc1-a208-866962fcc96e" (UID: "b527622a-7efc-4fc1-a208-866962fcc96e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.772339 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.772379 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b527622a-7efc-4fc1-a208-866962fcc96e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.951020 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn" (OuterVolumeSpecName: "kube-api-access-rkdjn") pod "b527622a-7efc-4fc1-a208-866962fcc96e" (UID: "b527622a-7efc-4fc1-a208-866962fcc96e"). InnerVolumeSpecName "kube-api-access-rkdjn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.974220 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tp6zk" event={"ID":"b527622a-7efc-4fc1-a208-866962fcc96e","Type":"ContainerDied","Data":"1ace2b66f0a9fa6e11d331c9e47aa1b6b68866a676b7c0a566423cb76c6f29e7"} Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.974287 4879 scope.go:117] "RemoveContainer" containerID="aff64b44770c566294543d35d6f5c14e05c6d177102312d06c697099fb78f9e6" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.974285 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tp6zk" Nov 25 15:41:14 crc kubenswrapper[4879]: I1125 15:41:14.974922 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkdjn\" (UniqueName: \"kubernetes.io/projected/b527622a-7efc-4fc1-a208-866962fcc96e-kube-api-access-rkdjn\") on node \"crc\" DevicePath \"\"" Nov 25 15:41:15 crc kubenswrapper[4879]: I1125 15:41:14.998614 4879 scope.go:117] "RemoveContainer" containerID="981343c15cbf52b6ec88a8026883cbc26611f36f7e60b12772394eb2270658c8" Nov 25 15:41:15 crc kubenswrapper[4879]: I1125 15:41:15.011088 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:41:15 crc kubenswrapper[4879]: I1125 15:41:15.017866 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tp6zk"] Nov 25 15:41:15 crc kubenswrapper[4879]: I1125 15:41:15.040848 4879 scope.go:117] "RemoveContainer" containerID="952883298f3154f086dd5eb186d8fa3ef75091acfd03658612d85603b0f2ea6e" Nov 25 15:41:15 crc kubenswrapper[4879]: I1125 15:41:15.653740 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" path="/var/lib/kubelet/pods/b527622a-7efc-4fc1-a208-866962fcc96e/volumes" Nov 25 15:41:27 crc kubenswrapper[4879]: I1125 15:41:27.645052 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:41:27 crc kubenswrapper[4879]: E1125 15:41:27.645708 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:41:38 crc kubenswrapper[4879]: I1125 15:41:38.644549 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:41:38 crc kubenswrapper[4879]: E1125 15:41:38.648015 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:41:52 crc kubenswrapper[4879]: I1125 15:41:52.645276 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:41:52 crc kubenswrapper[4879]: 
E1125 15:41:52.646049 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:42:04 crc kubenswrapper[4879]: I1125 15:42:04.645658 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:42:04 crc kubenswrapper[4879]: E1125 15:42:04.648008 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:42:17 crc kubenswrapper[4879]: I1125 15:42:17.645176 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:42:17 crc kubenswrapper[4879]: E1125 15:42:17.646175 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.509456 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:25 crc kubenswrapper[4879]: E1125 15:42:25.510103 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="registry-server" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.510136 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="registry-server" Nov 25 15:42:25 crc kubenswrapper[4879]: E1125 15:42:25.510180 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="extract-utilities" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.510189 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="extract-utilities" Nov 25 15:42:25 crc kubenswrapper[4879]: E1125 15:42:25.510213 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="extract-content" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.510221 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="extract-content" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.510377 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b527622a-7efc-4fc1-a208-866962fcc96e" containerName="registry-server" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.511633 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.520625 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.543848 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.543956 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.544034 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9ntc\" (UniqueName: \"kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.645576 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.646084 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.646222 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.646300 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9ntc\" (UniqueName: \"kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.646949 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.671006 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f9ntc\" (UniqueName: \"kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc\") pod \"redhat-operators-jdfh6\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:25 crc kubenswrapper[4879]: I1125 15:42:25.835811 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:26 crc kubenswrapper[4879]: I1125 15:42:26.275596 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:26 crc kubenswrapper[4879]: I1125 15:42:26.576887 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerStarted","Data":"5881e14e07a489aa82aca43b9785320b220afa9762c7720184755508f30ade68"} Nov 25 15:42:27 crc kubenswrapper[4879]: I1125 15:42:27.584945 4879 generic.go:334] "Generic (PLEG): container finished" podID="85d6092b-7eb7-46f4-bde9-525792c53008" containerID="2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815" exitCode=0 Nov 25 15:42:27 crc kubenswrapper[4879]: I1125 15:42:27.584992 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerDied","Data":"2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815"} Nov 25 15:42:30 crc kubenswrapper[4879]: I1125 15:42:30.644888 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:42:30 crc kubenswrapper[4879]: E1125 15:42:30.645712 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:42:31 crc kubenswrapper[4879]: I1125 15:42:31.620482 4879 generic.go:334] "Generic (PLEG): container finished" podID="85d6092b-7eb7-46f4-bde9-525792c53008" containerID="9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c" exitCode=0 Nov 25 15:42:31 crc kubenswrapper[4879]: I1125 15:42:31.620539 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerDied","Data":"9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c"} Nov 25 15:42:33 crc kubenswrapper[4879]: I1125 15:42:33.653851 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerStarted","Data":"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311"} Nov 25 15:42:33 crc kubenswrapper[4879]: I1125 15:42:33.672241 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jdfh6" podStartSLOduration=3.467399199 podStartE2EDuration="8.672218055s" podCreationTimestamp="2025-11-25 15:42:25 +0000 UTC" firstStartedPulling="2025-11-25 15:42:27.586987657 +0000 UTC m=+4639.190400728" lastFinishedPulling="2025-11-25 15:42:32.791806513 +0000 UTC m=+4644.395219584" 
observedRunningTime="2025-11-25 15:42:33.671493545 +0000 UTC m=+4645.274906616" watchObservedRunningTime="2025-11-25 15:42:33.672218055 +0000 UTC m=+4645.275631126" Nov 25 15:42:35 crc kubenswrapper[4879]: I1125 15:42:35.837547 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:35 crc kubenswrapper[4879]: I1125 15:42:35.838202 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:36 crc kubenswrapper[4879]: I1125 15:42:36.877419 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jdfh6" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="registry-server" probeResult="failure" output=< Nov 25 15:42:36 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 15:42:36 crc kubenswrapper[4879]: > Nov 25 15:42:44 crc kubenswrapper[4879]: I1125 15:42:44.645196 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:42:44 crc kubenswrapper[4879]: E1125 15:42:44.646031 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:42:45 crc kubenswrapper[4879]: I1125 15:42:45.885397 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:45 crc kubenswrapper[4879]: I1125 15:42:45.931113 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:46 crc kubenswrapper[4879]: I1125 15:42:46.124812 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:47 crc kubenswrapper[4879]: I1125 15:42:47.779261 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jdfh6" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="registry-server" containerID="cri-o://07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311" gracePeriod=2 Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.200321 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.387756 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9ntc\" (UniqueName: \"kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc\") pod \"85d6092b-7eb7-46f4-bde9-525792c53008\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.387968 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content\") pod \"85d6092b-7eb7-46f4-bde9-525792c53008\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.388058 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities\") pod \"85d6092b-7eb7-46f4-bde9-525792c53008\" (UID: \"85d6092b-7eb7-46f4-bde9-525792c53008\") " Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.389116 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities" (OuterVolumeSpecName: "utilities") pod "85d6092b-7eb7-46f4-bde9-525792c53008" (UID: "85d6092b-7eb7-46f4-bde9-525792c53008"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.396584 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc" (OuterVolumeSpecName: "kube-api-access-f9ntc") pod "85d6092b-7eb7-46f4-bde9-525792c53008" (UID: "85d6092b-7eb7-46f4-bde9-525792c53008"). InnerVolumeSpecName "kube-api-access-f9ntc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.483290 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85d6092b-7eb7-46f4-bde9-525792c53008" (UID: "85d6092b-7eb7-46f4-bde9-525792c53008"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.489781 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.489817 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85d6092b-7eb7-46f4-bde9-525792c53008-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.489831 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9ntc\" (UniqueName: \"kubernetes.io/projected/85d6092b-7eb7-46f4-bde9-525792c53008-kube-api-access-f9ntc\") on node \"crc\" DevicePath \"\"" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.789441 4879 generic.go:334] "Generic (PLEG): container finished" podID="85d6092b-7eb7-46f4-bde9-525792c53008" containerID="07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311" exitCode=0 Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.789488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerDied","Data":"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311"} Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.789516 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jdfh6" event={"ID":"85d6092b-7eb7-46f4-bde9-525792c53008","Type":"ContainerDied","Data":"5881e14e07a489aa82aca43b9785320b220afa9762c7720184755508f30ade68"} Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.789533 4879 scope.go:117] "RemoveContainer" containerID="07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.789657 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jdfh6" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.819660 4879 scope.go:117] "RemoveContainer" containerID="9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.833282 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.842941 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jdfh6"] Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.847437 4879 scope.go:117] "RemoveContainer" containerID="2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.880004 4879 scope.go:117] "RemoveContainer" containerID="07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311" Nov 25 15:42:48 crc kubenswrapper[4879]: E1125 15:42:48.881208 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311\": container with ID starting with 07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311 not found: ID does not exist" containerID="07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.881273 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311"} err="failed to get container status \"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311\": rpc error: code = NotFound desc = could not find container \"07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311\": container with ID starting with 07cce6da79dbafc8697cde11235e23a959147b850235a91a649725ff7a4f2311 not found: ID does not exist" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.881314 4879 scope.go:117] "RemoveContainer" containerID="9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c" Nov 25 15:42:48 crc kubenswrapper[4879]: E1125 15:42:48.881883 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c\": container with ID starting with 9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c not found: ID does not exist" containerID="9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.881934 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c"} err="failed to get container status \"9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c\": rpc error: code = NotFound desc = could not find container \"9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c\": container with ID starting with 9f397ccd16d649f76425294558eb45f8aad2501c7ed66695f7581b321a41b19c not found: ID does not exist" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.881962 4879 scope.go:117] "RemoveContainer" containerID="2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815" Nov 25 15:42:48 crc kubenswrapper[4879]: E1125 15:42:48.882367 4879 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815\": container with ID starting with 2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815 not found: ID does not exist" containerID="2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815" Nov 25 15:42:48 crc kubenswrapper[4879]: I1125 15:42:48.882433 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815"} err="failed to get container status \"2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815\": rpc error: code = NotFound desc = could not find container \"2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815\": container with ID starting with 2aa5e273112d69f022ecd544e092c9daa71459250378ddb60ff568e567f72815 not found: ID does not exist" Nov 25 15:42:49 crc kubenswrapper[4879]: I1125 15:42:49.656307 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" path="/var/lib/kubelet/pods/85d6092b-7eb7-46f4-bde9-525792c53008/volumes" Nov 25 15:42:59 crc kubenswrapper[4879]: I1125 15:42:59.653537 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:42:59 crc kubenswrapper[4879]: E1125 15:42:59.654667 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:43:11 crc kubenswrapper[4879]: I1125 15:43:11.644448 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:43:11 crc kubenswrapper[4879]: E1125 15:43:11.645245 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:43:26 crc kubenswrapper[4879]: I1125 15:43:26.645071 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:43:27 crc kubenswrapper[4879]: I1125 15:43:27.120247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122"} Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.030305 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-kvkpx"] Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.035073 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-kvkpx"] Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.166935 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-nxwtl"] Nov 25 15:44:46 crc 
kubenswrapper[4879]: E1125 15:44:46.167289 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="registry-server" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.167315 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="registry-server" Nov 25 15:44:46 crc kubenswrapper[4879]: E1125 15:44:46.167345 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="extract-content" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.167353 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="extract-content" Nov 25 15:44:46 crc kubenswrapper[4879]: E1125 15:44:46.167376 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="extract-utilities" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.167384 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="extract-utilities" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.167556 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="85d6092b-7eb7-46f4-bde9-525792c53008" containerName="registry-server" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.168481 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.170672 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.170973 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.171328 4879 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-47r78" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.171471 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.175042 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-nxwtl"] Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.238456 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.238515 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.238556 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vhkj\" (UniqueName: \"kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " 
pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.339453 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vhkj\" (UniqueName: \"kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.339558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.339582 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.339890 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.342005 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.362922 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vhkj\" (UniqueName: \"kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj\") pod \"crc-storage-crc-nxwtl\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.522033 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.945537 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-nxwtl"] Nov 25 15:44:46 crc kubenswrapper[4879]: I1125 15:44:46.957814 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:44:47 crc kubenswrapper[4879]: I1125 15:44:47.664258 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a2604b-9d34-4dee-ab7a-1741dcda9c58" path="/var/lib/kubelet/pods/73a2604b-9d34-4dee-ab7a-1741dcda9c58/volumes" Nov 25 15:44:47 crc kubenswrapper[4879]: I1125 15:44:47.772609 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-nxwtl" event={"ID":"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8","Type":"ContainerStarted","Data":"bc1421b1c2558e58ff1ba0c8426057af995da9433e47e1faa2d66169dda8e680"} Nov 25 15:44:47 crc kubenswrapper[4879]: I1125 15:44:47.772656 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-nxwtl" event={"ID":"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8","Type":"ContainerStarted","Data":"e135ca3292ce94062ec8eb6f6871b0af42ec54ba5e3855fa26cfd263bcd2b64a"} Nov 25 15:44:48 crc kubenswrapper[4879]: I1125 15:44:48.781262 4879 generic.go:334] "Generic (PLEG): container finished" podID="fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" containerID="bc1421b1c2558e58ff1ba0c8426057af995da9433e47e1faa2d66169dda8e680" exitCode=0 Nov 25 15:44:48 crc kubenswrapper[4879]: I1125 15:44:48.781308 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-nxwtl" event={"ID":"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8","Type":"ContainerDied","Data":"bc1421b1c2558e58ff1ba0c8426057af995da9433e47e1faa2d66169dda8e680"} Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.072655 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.195312 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vhkj\" (UniqueName: \"kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj\") pod \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.195430 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt\") pod \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.195502 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage\") pod \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\" (UID: \"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8\") " Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.197450 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" (UID: "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.203385 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj" (OuterVolumeSpecName: "kube-api-access-2vhkj") pod "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" (UID: "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8"). InnerVolumeSpecName "kube-api-access-2vhkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.216479 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" (UID: "fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.298183 4879 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.298244 4879 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.298259 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vhkj\" (UniqueName: \"kubernetes.io/projected/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8-kube-api-access-2vhkj\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.799416 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-nxwtl" event={"ID":"fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8","Type":"ContainerDied","Data":"e135ca3292ce94062ec8eb6f6871b0af42ec54ba5e3855fa26cfd263bcd2b64a"} Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.799816 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e135ca3292ce94062ec8eb6f6871b0af42ec54ba5e3855fa26cfd263bcd2b64a" Nov 25 15:44:50 crc kubenswrapper[4879]: I1125 15:44:50.799502 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-nxwtl" Nov 25 15:44:51 crc kubenswrapper[4879]: I1125 15:44:51.996509 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-nxwtl"] Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.004430 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-nxwtl"] Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.130983 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-w9tzg"] Nov 25 15:44:52 crc kubenswrapper[4879]: E1125 15:44:52.131503 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" containerName="storage" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.131547 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" containerName="storage" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.131822 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" containerName="storage" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.132679 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.135262 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.135662 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.135769 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.142385 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w9tzg"] Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.142582 4879 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-47r78" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.234691 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68zgn\" (UniqueName: \"kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.234789 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.234841 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.335676 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68zgn\" (UniqueName: 
\"kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.335983 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.336045 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.336228 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.336690 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.358100 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68zgn\" (UniqueName: \"kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn\") pod \"crc-storage-crc-w9tzg\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.454634 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:52 crc kubenswrapper[4879]: I1125 15:44:52.875238 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w9tzg"] Nov 25 15:44:52 crc kubenswrapper[4879]: W1125 15:44:52.882688 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3305d42_a394_40f6_9d7a_faab74c5248d.slice/crio-d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a WatchSource:0}: Error finding container d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a: Status 404 returned error can't find the container with id d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a Nov 25 15:44:53 crc kubenswrapper[4879]: I1125 15:44:53.654246 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8" path="/var/lib/kubelet/pods/fa0ee6a6-f1c1-4bdc-9139-917b3c443bf8/volumes" Nov 25 15:44:53 crc kubenswrapper[4879]: I1125 15:44:53.827901 4879 generic.go:334] "Generic (PLEG): container finished" podID="d3305d42-a394-40f6-9d7a-faab74c5248d" containerID="71fe8c6eb95e5cf117d8523a9de7e3ac79965e38e62250bdddd58cfd072413ba" exitCode=0 Nov 25 15:44:53 crc kubenswrapper[4879]: I1125 15:44:53.827945 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w9tzg" event={"ID":"d3305d42-a394-40f6-9d7a-faab74c5248d","Type":"ContainerDied","Data":"71fe8c6eb95e5cf117d8523a9de7e3ac79965e38e62250bdddd58cfd072413ba"} Nov 25 15:44:53 crc kubenswrapper[4879]: I1125 15:44:53.827992 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w9tzg" event={"ID":"d3305d42-a394-40f6-9d7a-faab74c5248d","Type":"ContainerStarted","Data":"d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a"} Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.098393 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.280394 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt\") pod \"d3305d42-a394-40f6-9d7a-faab74c5248d\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.280521 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d3305d42-a394-40f6-9d7a-faab74c5248d" (UID: "d3305d42-a394-40f6-9d7a-faab74c5248d"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.280559 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage\") pod \"d3305d42-a394-40f6-9d7a-faab74c5248d\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.280640 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68zgn\" (UniqueName: \"kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn\") pod \"d3305d42-a394-40f6-9d7a-faab74c5248d\" (UID: \"d3305d42-a394-40f6-9d7a-faab74c5248d\") " Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.281107 4879 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3305d42-a394-40f6-9d7a-faab74c5248d-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.286114 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn" (OuterVolumeSpecName: "kube-api-access-68zgn") pod "d3305d42-a394-40f6-9d7a-faab74c5248d" (UID: "d3305d42-a394-40f6-9d7a-faab74c5248d"). InnerVolumeSpecName "kube-api-access-68zgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.298592 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d3305d42-a394-40f6-9d7a-faab74c5248d" (UID: "d3305d42-a394-40f6-9d7a-faab74c5248d"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.382717 4879 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3305d42-a394-40f6-9d7a-faab74c5248d-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.382784 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68zgn\" (UniqueName: \"kubernetes.io/projected/d3305d42-a394-40f6-9d7a-faab74c5248d-kube-api-access-68zgn\") on node \"crc\" DevicePath \"\"" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.849677 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w9tzg" event={"ID":"d3305d42-a394-40f6-9d7a-faab74c5248d","Type":"ContainerDied","Data":"d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a"} Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.849766 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d029a2b2c0e05dad4ee25ba5fabbe9a757077ee0e2ceb115b206e076040abb6a" Nov 25 15:44:55 crc kubenswrapper[4879]: I1125 15:44:55.849800 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-w9tzg" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.154019 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz"] Nov 25 15:45:00 crc kubenswrapper[4879]: E1125 15:45:00.154651 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3305d42-a394-40f6-9d7a-faab74c5248d" containerName="storage" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.154666 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3305d42-a394-40f6-9d7a-faab74c5248d" containerName="storage" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.154790 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3305d42-a394-40f6-9d7a-faab74c5248d" containerName="storage" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.155284 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.157824 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.163442 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.190710 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz"] Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.354909 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.354981 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.355094 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfq2r\" (UniqueName: \"kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.456557 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.456634 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.456683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfq2r\" (UniqueName: \"kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.457818 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.462891 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.479480 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfq2r\" (UniqueName: \"kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r\") pod \"collect-profiles-29401425-gj9sz\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:00 crc kubenswrapper[4879]: I1125 15:45:00.776207 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:01 crc kubenswrapper[4879]: I1125 15:45:01.239014 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz"] Nov 25 15:45:01 crc kubenswrapper[4879]: I1125 15:45:01.904285 4879 generic.go:334] "Generic (PLEG): container finished" podID="a05c7385-b05c-4da3-b47d-25dfd77376b2" containerID="13516a8fdb7ee64c776bf318974a6569e7f5c14a7aa72268e6adaa6c87f4140d" exitCode=0 Nov 25 15:45:01 crc kubenswrapper[4879]: I1125 15:45:01.904562 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" event={"ID":"a05c7385-b05c-4da3-b47d-25dfd77376b2","Type":"ContainerDied","Data":"13516a8fdb7ee64c776bf318974a6569e7f5c14a7aa72268e6adaa6c87f4140d"} Nov 25 15:45:01 crc kubenswrapper[4879]: I1125 15:45:01.904594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" event={"ID":"a05c7385-b05c-4da3-b47d-25dfd77376b2","Type":"ContainerStarted","Data":"1cb200ba233ee9097261fb3051e474a669881bc756770950ccbe9ae9a5183c06"} Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.185987 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.198604 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfq2r\" (UniqueName: \"kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r\") pod \"a05c7385-b05c-4da3-b47d-25dfd77376b2\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.198759 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume\") pod \"a05c7385-b05c-4da3-b47d-25dfd77376b2\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.198963 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume\") pod \"a05c7385-b05c-4da3-b47d-25dfd77376b2\" (UID: \"a05c7385-b05c-4da3-b47d-25dfd77376b2\") " Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.200634 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume" (OuterVolumeSpecName: "config-volume") pod "a05c7385-b05c-4da3-b47d-25dfd77376b2" (UID: "a05c7385-b05c-4da3-b47d-25dfd77376b2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.208375 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r" (OuterVolumeSpecName: "kube-api-access-hfq2r") pod "a05c7385-b05c-4da3-b47d-25dfd77376b2" (UID: "a05c7385-b05c-4da3-b47d-25dfd77376b2"). InnerVolumeSpecName "kube-api-access-hfq2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.211248 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a05c7385-b05c-4da3-b47d-25dfd77376b2" (UID: "a05c7385-b05c-4da3-b47d-25dfd77376b2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.300510 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfq2r\" (UniqueName: \"kubernetes.io/projected/a05c7385-b05c-4da3-b47d-25dfd77376b2-kube-api-access-hfq2r\") on node \"crc\" DevicePath \"\"" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.300543 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a05c7385-b05c-4da3-b47d-25dfd77376b2-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.300554 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a05c7385-b05c-4da3-b47d-25dfd77376b2-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.919340 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" event={"ID":"a05c7385-b05c-4da3-b47d-25dfd77376b2","Type":"ContainerDied","Data":"1cb200ba233ee9097261fb3051e474a669881bc756770950ccbe9ae9a5183c06"} Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.919390 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cb200ba233ee9097261fb3051e474a669881bc756770950ccbe9ae9a5183c06" Nov 25 15:45:03 crc kubenswrapper[4879]: I1125 15:45:03.919389 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz" Nov 25 15:45:04 crc kubenswrapper[4879]: I1125 15:45:04.307120 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss"] Nov 25 15:45:04 crc kubenswrapper[4879]: I1125 15:45:04.313479 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401380-gvtss"] Nov 25 15:45:05 crc kubenswrapper[4879]: I1125 15:45:05.654032 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52" path="/var/lib/kubelet/pods/a8b5c4be-23bd-4e4a-a53f-f9a7a8d5cb52/volumes" Nov 25 15:45:42 crc kubenswrapper[4879]: I1125 15:45:42.540360 4879 scope.go:117] "RemoveContainer" containerID="300d5f6e2ac3e797b8e4321551487c763a497386a386fa1f6fac1fbf246a1394" Nov 25 15:45:42 crc kubenswrapper[4879]: I1125 15:45:42.565878 4879 scope.go:117] "RemoveContainer" containerID="bf29b7d952df2d9d2ec65f9fee0fb0b0d092d1d27e181b9f4059f176c47b8889" Nov 25 15:45:47 crc kubenswrapper[4879]: I1125 15:45:47.408904 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:45:47 crc kubenswrapper[4879]: I1125 15:45:47.409475 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:46:17 crc kubenswrapper[4879]: I1125 15:46:17.409484 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:46:17 crc kubenswrapper[4879]: I1125 15:46:17.410222 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.408400 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.408970 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.409021 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.409700 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.409769 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122" gracePeriod=600 Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.719699 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122" exitCode=0 Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.719784 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122"} Nov 25 15:46:47 crc kubenswrapper[4879]: I1125 15:46:47.720054 4879 scope.go:117] "RemoveContainer" containerID="a53f579db3eab9ecafaf4c5446f9765fc59b605e54446014eb9521639fb484f7" Nov 25 15:46:48 crc kubenswrapper[4879]: I1125 15:46:48.730087 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38"} Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.111411 4879 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:11 crc kubenswrapper[4879]: E1125 15:48:11.112184 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a05c7385-b05c-4da3-b47d-25dfd77376b2" containerName="collect-profiles" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.112202 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a05c7385-b05c-4da3-b47d-25dfd77376b2" containerName="collect-profiles" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.112416 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a05c7385-b05c-4da3-b47d-25dfd77376b2" containerName="collect-profiles" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.113384 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.117498 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.125522 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.125703 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.133702 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.133971 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xxnwp" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.134829 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.226243 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.226335 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj7vz\" (UniqueName: \"kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.226418 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.327707 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.327796 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.327849 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj7vz\" (UniqueName: \"kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.328864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.328924 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.352391 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj7vz\" (UniqueName: \"kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz\") pod \"dnsmasq-dns-5d7b5456f5-jn5v4\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.402621 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.404523 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.418250 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.430990 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.530040 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.530111 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.530149 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4jwz\" (UniqueName: \"kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.631809 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.632097 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.632136 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4jwz\" (UniqueName: \"kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.633639 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.634173 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.653203 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4jwz\" (UniqueName: \"kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz\") pod \"dnsmasq-dns-98ddfc8f-mvgxt\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:11 crc 
kubenswrapper[4879]: I1125 15:48:11.713079 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:11 crc kubenswrapper[4879]: I1125 15:48:11.728616 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.171206 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:48:12 crc kubenswrapper[4879]: W1125 15:48:12.179261 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea2ced60_4ddd_42db_a2c3_577cb848f906.slice/crio-8eb9679825fbb0c264c750761ae6abf835740e91af94e1c1b9fa63468dd98771 WatchSource:0}: Error finding container 8eb9679825fbb0c264c750761ae6abf835740e91af94e1c1b9fa63468dd98771: Status 404 returned error can't find the container with id 8eb9679825fbb0c264c750761ae6abf835740e91af94e1c1b9fa63468dd98771 Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.246812 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.248157 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.249927 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.249942 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.249935 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-s69kq" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.250471 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.251863 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.260915 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340284 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340334 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 
15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340415 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340474 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340500 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340634 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhswr\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.340784 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.370967 4879 generic.go:334] "Generic (PLEG): container finished" podID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerID="0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549" exitCode=0 Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.371191 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" event={"ID":"ea2ced60-4ddd-42db-a2c3-577cb848f906","Type":"ContainerDied","Data":"0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549"} Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.371398 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" event={"ID":"ea2ced60-4ddd-42db-a2c3-577cb848f906","Type":"ContainerStarted","Data":"8eb9679825fbb0c264c750761ae6abf835740e91af94e1c1b9fa63468dd98771"} Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.383252 4879 generic.go:334] "Generic (PLEG): container finished" podID="240e07c7-3799-433b-848a-8346df71cf89" containerID="ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a" exitCode=0 Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.383303 4879 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" event={"ID":"240e07c7-3799-433b-848a-8346df71cf89","Type":"ContainerDied","Data":"ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a"} Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.383357 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" event={"ID":"240e07c7-3799-433b-848a-8346df71cf89","Type":"ContainerStarted","Data":"4903b4524d0df3026e5ae53f07c470aaf3cc1875e390dbd4845f8eea8bdf3d2e"} Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.441909 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.441957 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.441977 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442034 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442055 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442099 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442166 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhswr\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " 
pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.442224 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.443375 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.444103 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.445061 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.445328 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.446520 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.446619 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.448141 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.448272 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/52716e15751c55389f46a7427f09f295fbea86d2056bda7fbe49a8347e32d922/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.453491 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.460733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhswr\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.480333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.547284 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.548519 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.551354 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.552015 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.552078 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.552282 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.552370 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dllx2" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.562150 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.575869 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.644792 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.645230 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.645361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.645493 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkfxz\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.645603 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.646380 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.646420 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.646539 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.646576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.748399 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749172 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749205 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749219 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749328 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkfxz\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749364 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749432 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749449 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749530 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.749552 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.750015 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.751251 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.751286 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.755664 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.757925 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.759981 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.760102 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.760155 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8b57da8dd8d5f860806fbeafe2314c1046ba0e4dec12aeb61b37fa286902e8be/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.768507 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkfxz\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.796910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:12 crc kubenswrapper[4879]: I1125 15:48:12.925343 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.031925 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.135276 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.139111 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.143551 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.144170 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.144320 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-sbgzj" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.144503 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.152998 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 25 15:48:13 crc kubenswrapper[4879]: W1125 15:48:13.152998 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9890ff93_76f9_4d51_a567_68bab2a4fda8.slice/crio-5a455806d51bbe3b46c38eed462f8af93c39135bab2dd04e423195873e97cef4 WatchSource:0}: Error finding container 5a455806d51bbe3b46c38eed462f8af93c39135bab2dd04e423195873e97cef4: Status 404 returned error can't find the container with id 5a455806d51bbe3b46c38eed462f8af93c39135bab2dd04e423195873e97cef4 Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.160493 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.168684 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256270 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256378 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spbpj\" (UniqueName: \"kubernetes.io/projected/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kube-api-access-spbpj\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256415 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kolla-config\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256456 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256482 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256505 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-default\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256533 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.256709 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.361883 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.361933 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.361951 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-default\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.361972 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.362009 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.362045 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.362095 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spbpj\" (UniqueName: \"kubernetes.io/projected/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kube-api-access-spbpj\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.362116 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kolla-config\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.362454 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.363082 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kolla-config\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.363239 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-config-data-default\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.363641 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d6cce1f-4a79-40b0-b252-83b49b6a4770-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.366620 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.366679 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d6cce1f-4a79-40b0-b252-83b49b6a4770-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.367419 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.367450 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/dfbdd69cf208c71880c26681047cbaf8ceaa672418c20fb2604f1e37b0875089/globalmount\"" pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.383777 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spbpj\" (UniqueName: \"kubernetes.io/projected/6d6cce1f-4a79-40b0-b252-83b49b6a4770-kube-api-access-spbpj\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.393758 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerStarted","Data":"29f9e609c0ae4394ed74cad5b1e736b58886eac4f1871420abfb1d2f93346ba9"} Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.396334 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" event={"ID":"240e07c7-3799-433b-848a-8346df71cf89","Type":"ContainerStarted","Data":"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e"} Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.396456 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.398983 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" event={"ID":"ea2ced60-4ddd-42db-a2c3-577cb848f906","Type":"ContainerStarted","Data":"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0"} Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.399326 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.400201 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerStarted","Data":"5a455806d51bbe3b46c38eed462f8af93c39135bab2dd04e423195873e97cef4"} Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.401182 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbc6dee3-7452-4ea7-a6a5-948590bd5672\") pod \"openstack-galera-0\" (UID: \"6d6cce1f-4a79-40b0-b252-83b49b6a4770\") " pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.425722 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" podStartSLOduration=2.42569823 podStartE2EDuration="2.42569823s" podCreationTimestamp="2025-11-25 15:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:13.416423071 +0000 UTC m=+4985.019836152" watchObservedRunningTime="2025-11-25 15:48:13.42569823 +0000 UTC m=+4985.029111291" Nov 25 15:48:13 crc 
kubenswrapper[4879]: I1125 15:48:13.432088 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" podStartSLOduration=2.432069332 podStartE2EDuration="2.432069332s" podCreationTimestamp="2025-11-25 15:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:13.430732096 +0000 UTC m=+4985.034145177" watchObservedRunningTime="2025-11-25 15:48:13.432069332 +0000 UTC m=+4985.035482403" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.464479 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.511546 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.514213 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.516927 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.517329 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rzwb9" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.519535 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.565783 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-config-data\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.565861 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kolla-config\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.565978 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mw2k\" (UniqueName: \"kubernetes.io/projected/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kube-api-access-9mw2k\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.669604 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-config-data\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.669948 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kolla-config\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.670003 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mw2k\" (UniqueName: 
\"kubernetes.io/projected/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kube-api-access-9mw2k\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.671889 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kolla-config\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.674187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-config-data\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.745469 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mw2k\" (UniqueName: \"kubernetes.io/projected/9e5810bb-73c5-4c0c-9fd7-07556c6a600a-kube-api-access-9mw2k\") pod \"memcached-0\" (UID: \"9e5810bb-73c5-4c0c-9fd7-07556c6a600a\") " pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.867739 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 25 15:48:13 crc kubenswrapper[4879]: I1125 15:48:13.951940 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 25 15:48:13 crc kubenswrapper[4879]: W1125 15:48:13.977660 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d6cce1f_4a79_40b0_b252_83b49b6a4770.slice/crio-f767f64b3848301b2abab27df24e91eff574cceb0f371a5c6d096fce8ce4d386 WatchSource:0}: Error finding container f767f64b3848301b2abab27df24e91eff574cceb0f371a5c6d096fce8ce4d386: Status 404 returned error can't find the container with id f767f64b3848301b2abab27df24e91eff574cceb0f371a5c6d096fce8ce4d386 Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.120108 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 25 15:48:14 crc kubenswrapper[4879]: W1125 15:48:14.128881 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e5810bb_73c5_4c0c_9fd7_07556c6a600a.slice/crio-167083801f4bc961949d71935069dc031740195ddca6d1c4ce43f1c169640da4 WatchSource:0}: Error finding container 167083801f4bc961949d71935069dc031740195ddca6d1c4ce43f1c169640da4: Status 404 returned error can't find the container with id 167083801f4bc961949d71935069dc031740195ddca6d1c4ce43f1c169640da4 Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.411159 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"9e5810bb-73c5-4c0c-9fd7-07556c6a600a","Type":"ContainerStarted","Data":"fdfe2cb9e2ff83eb132719e10a381cf4bc20fd597308b693f5819414bf847d6c"} Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.411264 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.411287 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"9e5810bb-73c5-4c0c-9fd7-07556c6a600a","Type":"ContainerStarted","Data":"167083801f4bc961949d71935069dc031740195ddca6d1c4ce43f1c169640da4"} Nov 25 15:48:14 
crc kubenswrapper[4879]: I1125 15:48:14.416531 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6d6cce1f-4a79-40b0-b252-83b49b6a4770","Type":"ContainerStarted","Data":"b96f9cdb659f42908681d3832f94ae9fd6e856f84d50e82e3be08b7d9035aa26"} Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.416602 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6d6cce1f-4a79-40b0-b252-83b49b6a4770","Type":"ContainerStarted","Data":"f767f64b3848301b2abab27df24e91eff574cceb0f371a5c6d096fce8ce4d386"} Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.419590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerStarted","Data":"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59"} Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.422639 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerStarted","Data":"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3"} Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.439669 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.439651911 podStartE2EDuration="1.439651911s" podCreationTimestamp="2025-11-25 15:48:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:14.436748673 +0000 UTC m=+4986.040161754" watchObservedRunningTime="2025-11-25 15:48:14.439651911 +0000 UTC m=+4986.043064982" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.721443 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.726923 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.729468 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.729723 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.730168 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-s7vcm" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.730288 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.731772 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.893964 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.894533 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.894742 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.894819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.894849 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.894973 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clwnh\" (UniqueName: \"kubernetes.io/projected/dce69d75-a372-4ec2-87d3-c84104c18dc6-kube-api-access-clwnh\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.895245 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.895316 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997149 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997207 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997242 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997265 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997309 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clwnh\" (UniqueName: \"kubernetes.io/projected/dce69d75-a372-4ec2-87d3-c84104c18dc6-kube-api-access-clwnh\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997331 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997352 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.997390 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.998098 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.999291 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.999437 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:14 crc kubenswrapper[4879]: I1125 15:48:14.999540 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dce69d75-a372-4ec2-87d3-c84104c18dc6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.002323 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.002696 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2f6ea5b2824315d768382ec21ae849540c3673ee6f566e2e5371e2ad27928340/globalmount\"" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.004274 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.005097 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dce69d75-a372-4ec2-87d3-c84104c18dc6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.019336 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clwnh\" (UniqueName: \"kubernetes.io/projected/dce69d75-a372-4ec2-87d3-c84104c18dc6-kube-api-access-clwnh\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.036969 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4a9d1d82-7413-48c9-8683-4171f395e37d\") pod \"openstack-cell1-galera-0\" (UID: \"dce69d75-a372-4ec2-87d3-c84104c18dc6\") " pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.048775 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:15 crc kubenswrapper[4879]: I1125 15:48:15.967363 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 25 15:48:15 crc kubenswrapper[4879]: W1125 15:48:15.973371 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddce69d75_a372_4ec2_87d3_c84104c18dc6.slice/crio-2410245db630b18c019fc4ca340712f2b88bfeca4f4c73b88014257269d7d911 WatchSource:0}: Error finding container 2410245db630b18c019fc4ca340712f2b88bfeca4f4c73b88014257269d7d911: Status 404 returned error can't find the container with id 2410245db630b18c019fc4ca340712f2b88bfeca4f4c73b88014257269d7d911 Nov 25 15:48:16 crc kubenswrapper[4879]: I1125 15:48:16.657720 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"dce69d75-a372-4ec2-87d3-c84104c18dc6","Type":"ContainerStarted","Data":"b7d3adb15535f6cf5155b59a701b575cf7f62392bd5074ed3644d650ca11cbe9"} Nov 25 15:48:16 crc kubenswrapper[4879]: I1125 15:48:16.657778 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"dce69d75-a372-4ec2-87d3-c84104c18dc6","Type":"ContainerStarted","Data":"2410245db630b18c019fc4ca340712f2b88bfeca4f4c73b88014257269d7d911"} Nov 25 15:48:19 crc kubenswrapper[4879]: I1125 15:48:19.690799 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d6cce1f-4a79-40b0-b252-83b49b6a4770" containerID="b96f9cdb659f42908681d3832f94ae9fd6e856f84d50e82e3be08b7d9035aa26" exitCode=0 Nov 25 15:48:19 crc kubenswrapper[4879]: I1125 15:48:19.690853 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6d6cce1f-4a79-40b0-b252-83b49b6a4770","Type":"ContainerDied","Data":"b96f9cdb659f42908681d3832f94ae9fd6e856f84d50e82e3be08b7d9035aa26"} Nov 25 15:48:20 crc kubenswrapper[4879]: I1125 15:48:20.702213 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6d6cce1f-4a79-40b0-b252-83b49b6a4770","Type":"ContainerStarted","Data":"cf72d8262868ec30cea135b9a5d77f23506c366c1fff82e360c107ceffb5b129"} Nov 25 15:48:20 crc kubenswrapper[4879]: I1125 15:48:20.727603 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.727588362 podStartE2EDuration="8.727588362s" podCreationTimestamp="2025-11-25 15:48:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:20.72641709 +0000 UTC m=+4992.329830161" watchObservedRunningTime="2025-11-25 15:48:20.727588362 +0000 UTC m=+4992.331001433" Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.432312 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.712040 4879 generic.go:334] "Generic (PLEG): container finished" podID="dce69d75-a372-4ec2-87d3-c84104c18dc6" containerID="b7d3adb15535f6cf5155b59a701b575cf7f62392bd5074ed3644d650ca11cbe9" exitCode=0 Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.712135 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"dce69d75-a372-4ec2-87d3-c84104c18dc6","Type":"ContainerDied","Data":"b7d3adb15535f6cf5155b59a701b575cf7f62392bd5074ed3644d650ca11cbe9"} Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.731062 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.782485 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:21 crc kubenswrapper[4879]: I1125 15:48:21.782726 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="dnsmasq-dns" containerID="cri-o://c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e" gracePeriod=10 Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.176986 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.240720 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config\") pod \"240e07c7-3799-433b-848a-8346df71cf89\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.240817 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj7vz\" (UniqueName: \"kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz\") pod \"240e07c7-3799-433b-848a-8346df71cf89\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.240868 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc\") pod \"240e07c7-3799-433b-848a-8346df71cf89\" (UID: \"240e07c7-3799-433b-848a-8346df71cf89\") " Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.253145 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz" (OuterVolumeSpecName: "kube-api-access-fj7vz") pod "240e07c7-3799-433b-848a-8346df71cf89" (UID: "240e07c7-3799-433b-848a-8346df71cf89"). InnerVolumeSpecName "kube-api-access-fj7vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.280164 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "240e07c7-3799-433b-848a-8346df71cf89" (UID: "240e07c7-3799-433b-848a-8346df71cf89"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.282288 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config" (OuterVolumeSpecName: "config") pod "240e07c7-3799-433b-848a-8346df71cf89" (UID: "240e07c7-3799-433b-848a-8346df71cf89"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.342529 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.342785 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240e07c7-3799-433b-848a-8346df71cf89-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.342867 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj7vz\" (UniqueName: \"kubernetes.io/projected/240e07c7-3799-433b-848a-8346df71cf89-kube-api-access-fj7vz\") on node \"crc\" DevicePath \"\"" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.720140 4879 generic.go:334] "Generic (PLEG): container finished" podID="240e07c7-3799-433b-848a-8346df71cf89" containerID="c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e" exitCode=0 Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.720189 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" event={"ID":"240e07c7-3799-433b-848a-8346df71cf89","Type":"ContainerDied","Data":"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e"} Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.720256 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" event={"ID":"240e07c7-3799-433b-848a-8346df71cf89","Type":"ContainerDied","Data":"4903b4524d0df3026e5ae53f07c470aaf3cc1875e390dbd4845f8eea8bdf3d2e"} Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.720287 4879 scope.go:117] "RemoveContainer" containerID="c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.720209 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jn5v4" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.722773 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"dce69d75-a372-4ec2-87d3-c84104c18dc6","Type":"ContainerStarted","Data":"e332bf0c8c41ae0f1898c216a6acc63f6ae03c432ac088961548c108fe7ad2c8"} Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.748365 4879 scope.go:117] "RemoveContainer" containerID="ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a" Nov 25 15:48:22 crc kubenswrapper[4879]: I1125 15:48:22.763421 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=9.763403637 podStartE2EDuration="9.763403637s" podCreationTimestamp="2025-11-25 15:48:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:22.750707556 +0000 UTC m=+4994.354120627" watchObservedRunningTime="2025-11-25 15:48:22.763403637 +0000 UTC m=+4994.366816708" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.778284 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.784892 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jn5v4"] Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.796598 4879 scope.go:117] "RemoveContainer" containerID="c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e" Nov 25 15:48:23 crc kubenswrapper[4879]: E1125 15:48:22.797803 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e\": container with ID starting with c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e not found: ID does not exist" containerID="c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.797836 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e"} err="failed to get container status \"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e\": rpc error: code = NotFound desc = could not find container \"c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e\": container with ID starting with c39b70164b05c668ab9bbce59839aefb45e93edaf5dbc7acf7b02d16fc1ef76e not found: ID does not exist" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.797859 4879 scope.go:117] "RemoveContainer" containerID="ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a" Nov 25 15:48:23 crc kubenswrapper[4879]: E1125 15:48:22.798405 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a\": container with ID starting with ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a not found: ID does not exist" containerID="ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:22.798455 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a"} err="failed to get container status \"ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a\": rpc error: code = NotFound desc = could not find container \"ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a\": container with ID starting with ac9d9e297e93d95220645944dd1fec00cc4b1cd4b7cc0f70386b1b97106b7f3a not found: ID does not exist" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:23.464992 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:23.465044 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:23.661520 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="240e07c7-3799-433b-848a-8346df71cf89" path="/var/lib/kubelet/pods/240e07c7-3799-433b-848a-8346df71cf89/volumes" Nov 25 15:48:23 crc kubenswrapper[4879]: I1125 15:48:23.869330 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 25 15:48:25 crc kubenswrapper[4879]: I1125 15:48:25.049529 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:25 crc kubenswrapper[4879]: I1125 15:48:25.049607 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:25 crc kubenswrapper[4879]: I1125 15:48:25.638501 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 25 15:48:25 crc kubenswrapper[4879]: I1125 15:48:25.712306 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 25 15:48:27 crc kubenswrapper[4879]: I1125 15:48:27.228539 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:27 crc kubenswrapper[4879]: I1125 15:48:27.295595 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 25 15:48:46 crc kubenswrapper[4879]: I1125 15:48:46.918723 4879 generic.go:334] "Generic (PLEG): container finished" podID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerID="713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59" exitCode=0 Nov 25 15:48:46 crc kubenswrapper[4879]: I1125 15:48:46.918812 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerDied","Data":"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59"} Nov 25 15:48:46 crc kubenswrapper[4879]: I1125 15:48:46.921546 4879 generic.go:334] "Generic (PLEG): container finished" podID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerID="8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3" exitCode=0 Nov 25 15:48:46 crc kubenswrapper[4879]: I1125 15:48:46.921723 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerDied","Data":"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3"} Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.408617 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.408721 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.931886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerStarted","Data":"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75"} Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.932393 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.933594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerStarted","Data":"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189"} Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.933766 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.958210 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.958192494 podStartE2EDuration="36.958192494s" podCreationTimestamp="2025-11-25 15:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:47.950057186 +0000 UTC m=+5019.553470277" watchObservedRunningTime="2025-11-25 15:48:47.958192494 +0000 UTC m=+5019.561605565" Nov 25 15:48:47 crc kubenswrapper[4879]: I1125 15:48:47.976027 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.976007162 podStartE2EDuration="36.976007162s" podCreationTimestamp="2025-11-25 15:48:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:48:47.970376011 +0000 UTC m=+5019.573789092" watchObservedRunningTime="2025-11-25 15:48:47.976007162 +0000 UTC m=+5019.579420233" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.907987 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:48:53 crc kubenswrapper[4879]: E1125 15:48:53.909005 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="init" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.909023 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="init" Nov 25 15:48:53 crc kubenswrapper[4879]: E1125 15:48:53.909072 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="dnsmasq-dns" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.909081 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="dnsmasq-dns" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.909499 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="240e07c7-3799-433b-848a-8346df71cf89" containerName="dnsmasq-dns" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.912395 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:53 crc kubenswrapper[4879]: I1125 15:48:53.925610 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.057477 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.057614 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.057692 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkd7d\" (UniqueName: \"kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.159707 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.160027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.160173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkd7d\" (UniqueName: \"kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.160289 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.160529 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.187117 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkd7d\" (UniqueName: \"kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d\") pod \"certified-operators-fd7bw\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.235067 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.738946 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.999389 4879 generic.go:334] "Generic (PLEG): container finished" podID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerID="8afdb524b224c0323c13f10a11fc30cec2937a757ef2b5e3e17cac87b787d719" exitCode=0 Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.999454 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerDied","Data":"8afdb524b224c0323c13f10a11fc30cec2937a757ef2b5e3e17cac87b787d719"} Nov 25 15:48:54 crc kubenswrapper[4879]: I1125 15:48:54.999511 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerStarted","Data":"338423cde94035d620b814fe1e7c95026b802333081689afbc2474392f159efb"} Nov 25 15:48:56 crc kubenswrapper[4879]: I1125 15:48:56.015017 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerStarted","Data":"d74b9a4389a062f530c675ec96a956be88fbdf54e3845f0c62c3d1bd8e07cb4a"} Nov 25 15:48:57 crc kubenswrapper[4879]: I1125 15:48:57.024487 4879 generic.go:334] "Generic (PLEG): container finished" podID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerID="d74b9a4389a062f530c675ec96a956be88fbdf54e3845f0c62c3d1bd8e07cb4a" exitCode=0 Nov 25 15:48:57 crc kubenswrapper[4879]: I1125 15:48:57.024598 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerDied","Data":"d74b9a4389a062f530c675ec96a956be88fbdf54e3845f0c62c3d1bd8e07cb4a"} Nov 25 15:48:58 crc kubenswrapper[4879]: I1125 15:48:58.036230 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerStarted","Data":"673ffc5fb005803752480feaa3f79b387230a3f22f4f4e25f9dbbc96618e9a78"} Nov 25 15:48:58 crc kubenswrapper[4879]: I1125 15:48:58.059912 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fd7bw" podStartSLOduration=2.621512278 podStartE2EDuration="5.059892631s" podCreationTimestamp="2025-11-25 15:48:53 +0000 UTC" firstStartedPulling="2025-11-25 15:48:55.003439335 +0000 
UTC m=+5026.606852406" lastFinishedPulling="2025-11-25 15:48:57.441819688 +0000 UTC m=+5029.045232759" observedRunningTime="2025-11-25 15:48:58.051969169 +0000 UTC m=+5029.655382240" watchObservedRunningTime="2025-11-25 15:48:58.059892631 +0000 UTC m=+5029.663305702" Nov 25 15:49:02 crc kubenswrapper[4879]: I1125 15:49:02.579159 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 25 15:49:02 crc kubenswrapper[4879]: I1125 15:49:02.929368 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:04 crc kubenswrapper[4879]: I1125 15:49:04.235376 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:04 crc kubenswrapper[4879]: I1125 15:49:04.235733 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:04 crc kubenswrapper[4879]: I1125 15:49:04.276737 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.081283 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.082964 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.097623 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.146371 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.203430 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.226112 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf2zk\" (UniqueName: \"kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.226169 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.226189 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.327468 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: 
\"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.327521 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf2zk\" (UniqueName: \"kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.327546 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.328545 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.328798 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.347847 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf2zk\" (UniqueName: \"kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk\") pod \"dnsmasq-dns-5b7946d7b9-sdnzq\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.402295 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.638508 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:49:05 crc kubenswrapper[4879]: I1125 15:49:05.760233 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:06 crc kubenswrapper[4879]: I1125 15:49:06.100187 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerStarted","Data":"9733f42362eaa34aed048974736702273b7e7cf922b7a0009dcf0af12211b1a7"} Nov 25 15:49:06 crc kubenswrapper[4879]: I1125 15:49:06.100239 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerStarted","Data":"d42b481aed30e57e6559c79fcfb5a9c54220ab1e29bf0b13a9d7009f4eccfbbd"} Nov 25 15:49:06 crc kubenswrapper[4879]: I1125 15:49:06.548247 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:07 crc kubenswrapper[4879]: I1125 15:49:07.108691 4879 generic.go:334] "Generic (PLEG): container finished" podID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerID="9733f42362eaa34aed048974736702273b7e7cf922b7a0009dcf0af12211b1a7" exitCode=0 Nov 25 15:49:07 crc kubenswrapper[4879]: I1125 15:49:07.111923 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerDied","Data":"9733f42362eaa34aed048974736702273b7e7cf922b7a0009dcf0af12211b1a7"} Nov 25 15:49:07 crc kubenswrapper[4879]: I1125 15:49:07.112353 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fd7bw" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="registry-server" containerID="cri-o://673ffc5fb005803752480feaa3f79b387230a3f22f4f4e25f9dbbc96618e9a78" gracePeriod=2 Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.031877 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="rabbitmq" containerID="cri-o://bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189" gracePeriod=604798 Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.118581 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerStarted","Data":"5e5c77be29cb635bee1d4c95e6e2cd7666e1bb814a8070db28fc12329a7b3b5d"} Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.118921 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.121794 4879 generic.go:334] "Generic (PLEG): container finished" podID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerID="673ffc5fb005803752480feaa3f79b387230a3f22f4f4e25f9dbbc96618e9a78" exitCode=0 Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.121824 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerDied","Data":"673ffc5fb005803752480feaa3f79b387230a3f22f4f4e25f9dbbc96618e9a78"} Nov 25 15:49:08 crc 
kubenswrapper[4879]: I1125 15:49:08.142546 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" podStartSLOduration=3.142525946 podStartE2EDuration="3.142525946s" podCreationTimestamp="2025-11-25 15:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:49:08.13932171 +0000 UTC m=+5039.742734781" watchObservedRunningTime="2025-11-25 15:49:08.142525946 +0000 UTC m=+5039.745939017" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.222781 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.275822 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content\") pod \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.276181 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkd7d\" (UniqueName: \"kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d\") pod \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.276248 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities\") pod \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\" (UID: \"13bbef46-604f-470a-8fdf-fd809fd2cbd7\") " Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.277226 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities" (OuterVolumeSpecName: "utilities") pod "13bbef46-604f-470a-8fdf-fd809fd2cbd7" (UID: "13bbef46-604f-470a-8fdf-fd809fd2cbd7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.281134 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d" (OuterVolumeSpecName: "kube-api-access-pkd7d") pod "13bbef46-604f-470a-8fdf-fd809fd2cbd7" (UID: "13bbef46-604f-470a-8fdf-fd809fd2cbd7"). InnerVolumeSpecName "kube-api-access-pkd7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.324200 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13bbef46-604f-470a-8fdf-fd809fd2cbd7" (UID: "13bbef46-604f-470a-8fdf-fd809fd2cbd7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.378726 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.378786 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkd7d\" (UniqueName: \"kubernetes.io/projected/13bbef46-604f-470a-8fdf-fd809fd2cbd7-kube-api-access-pkd7d\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.378803 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13bbef46-604f-470a-8fdf-fd809fd2cbd7-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:08 crc kubenswrapper[4879]: I1125 15:49:08.444341 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="rabbitmq" containerID="cri-o://b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75" gracePeriod=604799 Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.130850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fd7bw" event={"ID":"13bbef46-604f-470a-8fdf-fd809fd2cbd7","Type":"ContainerDied","Data":"338423cde94035d620b814fe1e7c95026b802333081689afbc2474392f159efb"} Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.130903 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fd7bw" Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.130920 4879 scope.go:117] "RemoveContainer" containerID="673ffc5fb005803752480feaa3f79b387230a3f22f4f4e25f9dbbc96618e9a78" Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.149741 4879 scope.go:117] "RemoveContainer" containerID="d74b9a4389a062f530c675ec96a956be88fbdf54e3845f0c62c3d1bd8e07cb4a" Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.164688 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.171983 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fd7bw"] Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.173528 4879 scope.go:117] "RemoveContainer" containerID="8afdb524b224c0323c13f10a11fc30cec2937a757ef2b5e3e17cac87b787d719" Nov 25 15:49:09 crc kubenswrapper[4879]: I1125 15:49:09.655566 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" path="/var/lib/kubelet/pods/13bbef46-604f-470a-8fdf-fd809fd2cbd7/volumes" Nov 25 15:49:12 crc kubenswrapper[4879]: I1125 15:49:12.576918 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.245:5672: connect: connection refused" Nov 25 15:49:12 crc kubenswrapper[4879]: I1125 15:49:12.926746 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.246:5672: connect: connection refused" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 
15:49:15.169968 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.178976 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.180100 4879 generic.go:334] "Generic (PLEG): container finished" podID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerID="b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75" exitCode=0 Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.180209 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerDied","Data":"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75"} Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.180249 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9890ff93-76f9-4d51-a567-68bab2a4fda8","Type":"ContainerDied","Data":"5a455806d51bbe3b46c38eed462f8af93c39135bab2dd04e423195873e97cef4"} Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.180275 4879 scope.go:117] "RemoveContainer" containerID="b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.180475 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.183791 4879 generic.go:334] "Generic (PLEG): container finished" podID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerID="bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189" exitCode=0 Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.183865 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.183862 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerDied","Data":"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189"} Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.184374 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0da22dcb-938f-4b60-b6b2-a428bc5bede3","Type":"ContainerDied","Data":"29f9e609c0ae4394ed74cad5b1e736b58886eac4f1871420abfb1d2f93346ba9"} Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.211556 4879 scope.go:117] "RemoveContainer" containerID="713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.249088 4879 scope.go:117] "RemoveContainer" containerID="b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.252330 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75\": container with ID starting with b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75 not found: ID does not exist" containerID="b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.252419 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75"} err="failed to get container status \"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75\": rpc error: code = NotFound desc = could not find container \"b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75\": container with ID starting with b564a0b099c58f43f670ffed8f17896b53fbb16f4b3a488beeda4ff7019f9e75 not found: ID does not exist" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.252466 4879 scope.go:117] "RemoveContainer" containerID="713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.252991 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59\": container with ID starting with 713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59 not found: ID does not exist" containerID="713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.253026 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59"} err="failed to get container status \"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59\": rpc error: code = NotFound desc = could not find container \"713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59\": container with ID starting with 713c13aa1026cfeb0d2eebc64e83cda30ba5ac43ec02c6f2845cdf0f53c33f59 not found: ID does not exist" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.253045 4879 scope.go:117] "RemoveContainer" containerID="bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 
15:49:15.273966 4879 scope.go:117] "RemoveContainer" containerID="8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285247 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkfxz\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285294 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285480 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285503 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285523 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285554 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285628 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285657 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285681 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285701 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285725 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285743 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285768 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285785 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285805 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie\") pod \"9890ff93-76f9-4d51-a567-68bab2a4fda8\" (UID: \"9890ff93-76f9-4d51-a567-68bab2a4fda8\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285837 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhswr\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285911 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.285961 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info\") pod \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\" (UID: \"0da22dcb-938f-4b60-b6b2-a428bc5bede3\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.288008 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.290213 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.291016 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.291545 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.291839 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.291865 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.296577 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.296768 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info" (OuterVolumeSpecName: "pod-info") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.296786 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz" (OuterVolumeSpecName: "kube-api-access-kkfxz") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "kube-api-access-kkfxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.297395 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.297624 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info" (OuterVolumeSpecName: "pod-info") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.305724 4879 scope.go:117] "RemoveContainer" containerID="bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.306665 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189\": container with ID starting with bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189 not found: ID does not exist" containerID="bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.306716 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189"} err="failed to get container status \"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189\": rpc error: code = NotFound desc = could not find container \"bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189\": container with ID starting with bd5b3f16b4fbbd97597928434162cc284620f176f3e264ae83437570a7460189 not found: ID does not exist" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.306752 4879 scope.go:117] "RemoveContainer" containerID="8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.307014 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3\": container with ID starting with 8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3 not found: ID does not exist" containerID="8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.307057 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3"} err="failed to get container status 
\"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3\": rpc error: code = NotFound desc = could not find container \"8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3\": container with ID starting with 8b00eba0bb0650161331a3ca3da9e5d1d92e8e636851a9e575e2fc6af78c63b3 not found: ID does not exist" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.314717 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr" (OuterVolumeSpecName: "kube-api-access-zhswr") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "kube-api-access-zhswr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.318901 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a" (OuterVolumeSpecName: "persistence") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "pvc-077d2db3-4da2-4546-8361-ffa01121a32a". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.324409 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e" (OuterVolumeSpecName: "persistence") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.338266 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf" (OuterVolumeSpecName: "server-conf") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.340134 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf" (OuterVolumeSpecName: "server-conf") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.387989 4879 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-server-conf\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388027 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388037 4879 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9890ff93-76f9-4d51-a567-68bab2a4fda8-pod-info\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388047 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhswr\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-kube-api-access-zhswr\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388058 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388066 4879 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0da22dcb-938f-4b60-b6b2-a428bc5bede3-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388073 4879 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0da22dcb-938f-4b60-b6b2-a428bc5bede3-pod-info\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388081 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkfxz\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-kube-api-access-kkfxz\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388089 4879 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0da22dcb-938f-4b60-b6b2-a428bc5bede3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388134 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") on node \"crc\" " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388147 4879 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388158 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388167 4879 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/9890ff93-76f9-4d51-a567-68bab2a4fda8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388184 4879 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") on node \"crc\" " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388195 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.388204 4879 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9890ff93-76f9-4d51-a567-68bab2a4fda8-server-conf\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.390510 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9890ff93-76f9-4d51-a567-68bab2a4fda8" (UID: "9890ff93-76f9-4d51-a567-68bab2a4fda8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.404450 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.410524 4879 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.410744 4879 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-077d2db3-4da2-4546-8361-ffa01121a32a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a") on node "crc" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.411041 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0da22dcb-938f-4b60-b6b2-a428bc5bede3" (UID: "0da22dcb-938f-4b60-b6b2-a428bc5bede3"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.420001 4879 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.420181 4879 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e") on node "crc" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.461382 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.461676 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="dnsmasq-dns" containerID="cri-o://b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0" gracePeriod=10 Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.489231 4879 reconciler_common.go:293] "Volume detached for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.489263 4879 reconciler_common.go:293] "Volume detached for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.489276 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9890ff93-76f9-4d51-a567-68bab2a4fda8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.489288 4879 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0da22dcb-938f-4b60-b6b2-a428bc5bede3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.548051 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.559749 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.584872 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.600872 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601273 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="setup-container" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601297 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="setup-container" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601319 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="setup-container" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601326 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="setup-container" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601337 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="extract-content" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601345 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="extract-content" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601352 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="registry-server" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601358 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="registry-server" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601382 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="extract-utilities" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601390 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="extract-utilities" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601413 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601427 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: E1125 15:49:15.601440 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601446 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601597 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601619 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="13bbef46-604f-470a-8fdf-fd809fd2cbd7" containerName="registry-server" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.601629 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" containerName="rabbitmq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.602581 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.609737 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.610487 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.610798 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dllx2" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.610966 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.612063 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.614261 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.629236 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.639411 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.640804 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.645054 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.645252 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.645458 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.645635 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.645745 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-s69kq" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.679309 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da22dcb-938f-4b60-b6b2-a428bc5bede3" path="/var/lib/kubelet/pods/0da22dcb-938f-4b60-b6b2-a428bc5bede3/volumes" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.680027 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9890ff93-76f9-4d51-a567-68bab2a4fda8" path="/var/lib/kubelet/pods/9890ff93-76f9-4d51-a567-68bab2a4fda8/volumes" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.680599 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693629 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a756a228-0a62-4e5f-a4fe-f728972087c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693655 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693680 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693697 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a503b64c-abfe-4b3b-a501-ef9b4203e56d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693716 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693731 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693824 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a756a228-0a62-4e5f-a4fe-f728972087c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693846 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693868 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693902 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a503b64c-abfe-4b3b-a501-ef9b4203e56d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693968 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkp8k\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-kube-api-access-bkp8k\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.693990 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.694016 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.694047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.694188 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8lz6\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-kube-api-access-m8lz6\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795461 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 
crc kubenswrapper[4879]: I1125 15:49:15.795519 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795548 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8lz6\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-kube-api-access-m8lz6\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795584 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795602 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a756a228-0a62-4e5f-a4fe-f728972087c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795640 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a503b64c-abfe-4b3b-a501-ef9b4203e56d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795656 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795681 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795700 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795734 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795750 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a756a228-0a62-4e5f-a4fe-f728972087c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795772 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795796 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795811 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795831 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a503b64c-abfe-4b3b-a501-ef9b4203e56d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795851 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkp8k\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-kube-api-access-bkp8k\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.795871 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.796583 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.797085 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-server-conf\") pod \"rabbitmq-server-0\" 
(UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.797396 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a756a228-0a62-4e5f-a4fe-f728972087c7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.797537 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.797900 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.798203 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a503b64c-abfe-4b3b-a501-ef9b4203e56d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.798188 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.798553 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.803351 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.803408 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8b57da8dd8d5f860806fbeafe2314c1046ba0e4dec12aeb61b37fa286902e8be/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.805435 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a756a228-0a62-4e5f-a4fe-f728972087c7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.805454 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a503b64c-abfe-4b3b-a501-ef9b4203e56d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.806960 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.815558 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
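[editor's note] The csi_attacher.go:380 / operation_generator.go:580 pairs around this point show staging being skipped because kubevirt.io.hostpath-provisioner does not advertise STAGE_UNSTAGE_VOLUME, while kubelet still records a device mount path of the form /var/lib/kubelet/plugins/kubernetes.io/csi/<driver>/<64 hex chars>/globalmount. That 64-hex segment is consistent with a SHA-256 digest of the CSI volume handle, which is how recent kubelets name the per-volume staging directory; whether the digest input is exactly the bare handle is an assumption in this sketch — run it and compare the output against the paths logged here.

// stagingpath.go — sketch only; reproduces the global-mount path layout above.
package main

import (
	"crypto/sha256"
	"fmt"
	"path/filepath"
)

// stagingPath assembles the CSI staging ("globalmount") directory. The handle
// is hashed so arbitrary volume IDs always yield a safe directory name.
// Assumption: the digest input is the volume handle verbatim.
func stagingPath(kubeletRoot, driver, volumeHandle string) string {
	sum := sha256.Sum256([]byte(volumeHandle))
	return filepath.Join(kubeletRoot, "plugins", "kubernetes.io", "csi",
		driver, fmt.Sprintf("%x", sum), "globalmount")
}

func main() {
	// Values taken from the MountVolume.MountDevice entries in this log.
	for _, handle := range []string{
		"pvc-077d2db3-4da2-4546-8361-ffa01121a32a", // rabbitmq-cell1-server-0
		"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e", // rabbitmq-server-0
	} {
		fmt.Println(stagingPath("/var/lib/kubelet",
			"kubevirt.io.hostpath-provisioner", handle))
	}
}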
Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.815602 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/52716e15751c55389f46a7427f09f295fbea86d2056bda7fbe49a8347e32d922/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.816802 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a756a228-0a62-4e5f-a4fe-f728972087c7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.819090 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8lz6\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-kube-api-access-m8lz6\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.819889 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkp8k\" (UniqueName: \"kubernetes.io/projected/a503b64c-abfe-4b3b-a501-ef9b4203e56d-kube-api-access-bkp8k\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.820442 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a503b64c-abfe-4b3b-a501-ef9b4203e56d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.822377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a756a228-0a62-4e5f-a4fe-f728972087c7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.853374 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e0b83fbd-c8b5-483e-870e-9b6bc7fd2f8e\") pod \"rabbitmq-server-0\" (UID: \"a756a228-0a62-4e5f-a4fe-f728972087c7\") " pod="openstack/rabbitmq-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.855039 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-077d2db3-4da2-4546-8361-ffa01121a32a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-077d2db3-4da2-4546-8361-ffa01121a32a\") pod \"rabbitmq-cell1-server-0\" (UID: \"a503b64c-abfe-4b3b-a501-ef9b4203e56d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.877860 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.897579 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config\") pod \"ea2ced60-4ddd-42db-a2c3-577cb848f906\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.897695 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc\") pod \"ea2ced60-4ddd-42db-a2c3-577cb848f906\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.897747 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4jwz\" (UniqueName: \"kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz\") pod \"ea2ced60-4ddd-42db-a2c3-577cb848f906\" (UID: \"ea2ced60-4ddd-42db-a2c3-577cb848f906\") " Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.906426 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz" (OuterVolumeSpecName: "kube-api-access-n4jwz") pod "ea2ced60-4ddd-42db-a2c3-577cb848f906" (UID: "ea2ced60-4ddd-42db-a2c3-577cb848f906"). InnerVolumeSpecName "kube-api-access-n4jwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.927812 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.942117 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config" (OuterVolumeSpecName: "config") pod "ea2ced60-4ddd-42db-a2c3-577cb848f906" (UID: "ea2ced60-4ddd-42db-a2c3-577cb848f906"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.946775 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ea2ced60-4ddd-42db-a2c3-577cb848f906" (UID: "ea2ced60-4ddd-42db-a2c3-577cb848f906"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:49:15 crc kubenswrapper[4879]: I1125 15:49:15.968025 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.001217 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.001311 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4jwz\" (UniqueName: \"kubernetes.io/projected/ea2ced60-4ddd-42db-a2c3-577cb848f906-kube-api-access-n4jwz\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.001333 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea2ced60-4ddd-42db-a2c3-577cb848f906-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.196603 4879 generic.go:334] "Generic (PLEG): container finished" podID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerID="b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0" exitCode=0 Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.196651 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" event={"ID":"ea2ced60-4ddd-42db-a2c3-577cb848f906","Type":"ContainerDied","Data":"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0"} Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.196678 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" event={"ID":"ea2ced60-4ddd-42db-a2c3-577cb848f906","Type":"ContainerDied","Data":"8eb9679825fbb0c264c750761ae6abf835740e91af94e1c1b9fa63468dd98771"} Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.196700 4879 scope.go:117] "RemoveContainer" containerID="b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.196840 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-mvgxt" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.216203 4879 scope.go:117] "RemoveContainer" containerID="0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.231149 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.236101 4879 scope.go:117] "RemoveContainer" containerID="b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0" Nov 25 15:49:16 crc kubenswrapper[4879]: E1125 15:49:16.236604 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0\": container with ID starting with b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0 not found: ID does not exist" containerID="b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.236639 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0"} err="failed to get container status \"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0\": rpc error: code = NotFound desc = could not find container \"b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0\": container with ID starting with b2c5f5ffb6e43986525ebc83b259c70f233bcc4aabf967dfe6ed38862776fed0 not found: ID does not exist" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.236662 4879 scope.go:117] "RemoveContainer" containerID="0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549" Nov 25 15:49:16 crc kubenswrapper[4879]: E1125 15:49:16.237022 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549\": container with ID starting with 0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549 not found: ID does not exist" containerID="0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.237039 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549"} err="failed to get container status \"0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549\": rpc error: code = NotFound desc = could not find container \"0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549\": container with ID starting with 0350607a706ce1d94a0f87ada8b0a5fd6d7b081246706d36ae9ad60cfb57e549 not found: ID does not exist" Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.237354 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-mvgxt"] Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.349382 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 25 15:49:16 crc kubenswrapper[4879]: I1125 15:49:16.430217 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 25 15:49:16 crc kubenswrapper[4879]: W1125 15:49:16.436567 4879 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda756a228_0a62_4e5f_a4fe_f728972087c7.slice/crio-af3e1fb7b3d4be871f7d6f1946fdfd800c505295d3759fcb44f71008348c5624 WatchSource:0}: Error finding container af3e1fb7b3d4be871f7d6f1946fdfd800c505295d3759fcb44f71008348c5624: Status 404 returned error can't find the container with id af3e1fb7b3d4be871f7d6f1946fdfd800c505295d3759fcb44f71008348c5624 Nov 25 15:49:17 crc kubenswrapper[4879]: I1125 15:49:17.204188 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a503b64c-abfe-4b3b-a501-ef9b4203e56d","Type":"ContainerStarted","Data":"0137e2d37ae0f9f8631223496199f79dfe0112624d9553ff3ad6e96f5599415c"} Nov 25 15:49:17 crc kubenswrapper[4879]: I1125 15:49:17.205594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a756a228-0a62-4e5f-a4fe-f728972087c7","Type":"ContainerStarted","Data":"af3e1fb7b3d4be871f7d6f1946fdfd800c505295d3759fcb44f71008348c5624"} Nov 25 15:49:17 crc kubenswrapper[4879]: I1125 15:49:17.408751 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:49:17 crc kubenswrapper[4879]: I1125 15:49:17.408814 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:49:17 crc kubenswrapper[4879]: I1125 15:49:17.655436 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" path="/var/lib/kubelet/pods/ea2ced60-4ddd-42db-a2c3-577cb848f906/volumes" Nov 25 15:49:18 crc kubenswrapper[4879]: I1125 15:49:18.221471 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a756a228-0a62-4e5f-a4fe-f728972087c7","Type":"ContainerStarted","Data":"c95ca83a785e18a5a159d969e21df4917b44d52d16f602dae72484d30ae4195c"} Nov 25 15:49:18 crc kubenswrapper[4879]: I1125 15:49:18.223800 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a503b64c-abfe-4b3b-a501-ef9b4203e56d","Type":"ContainerStarted","Data":"7624af6f7201eb97325f210bb7c0894e1309487ae8832b1455ca14549c8e6993"} Nov 25 15:49:47 crc kubenswrapper[4879]: I1125 15:49:47.409102 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:49:47 crc kubenswrapper[4879]: I1125 15:49:47.409674 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:49:47 crc kubenswrapper[4879]: I1125 15:49:47.409715 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:49:47 crc kubenswrapper[4879]: I1125 15:49:47.410416 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:49:47 crc kubenswrapper[4879]: I1125 15:49:47.410482 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" gracePeriod=600 Nov 25 15:49:47 crc kubenswrapper[4879]: E1125 15:49:47.586708 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:49:48 crc kubenswrapper[4879]: I1125 15:49:48.453114 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" exitCode=0 Nov 25 15:49:48 crc kubenswrapper[4879]: I1125 15:49:48.453177 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38"} Nov 25 15:49:48 crc kubenswrapper[4879]: I1125 15:49:48.453439 4879 scope.go:117] "RemoveContainer" containerID="28ee074ec08968e61681d80dad6c90c50953242ae0a04e644b744013e9695122" Nov 25 15:49:48 crc kubenswrapper[4879]: I1125 15:49:48.454052 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:49:48 crc kubenswrapper[4879]: E1125 15:49:48.454394 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:49:50 crc kubenswrapper[4879]: I1125 15:49:50.470755 4879 generic.go:334] "Generic (PLEG): container finished" podID="a503b64c-abfe-4b3b-a501-ef9b4203e56d" containerID="7624af6f7201eb97325f210bb7c0894e1309487ae8832b1455ca14549c8e6993" exitCode=0 Nov 25 15:49:50 crc kubenswrapper[4879]: I1125 15:49:50.470809 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a503b64c-abfe-4b3b-a501-ef9b4203e56d","Type":"ContainerDied","Data":"7624af6f7201eb97325f210bb7c0894e1309487ae8832b1455ca14549c8e6993"} Nov 25 15:49:50 crc kubenswrapper[4879]: I1125 15:49:50.473899 4879 generic.go:334] "Generic (PLEG): container finished" 
podID="a756a228-0a62-4e5f-a4fe-f728972087c7" containerID="c95ca83a785e18a5a159d969e21df4917b44d52d16f602dae72484d30ae4195c" exitCode=0 Nov 25 15:49:50 crc kubenswrapper[4879]: I1125 15:49:50.473970 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a756a228-0a62-4e5f-a4fe-f728972087c7","Type":"ContainerDied","Data":"c95ca83a785e18a5a159d969e21df4917b44d52d16f602dae72484d30ae4195c"} Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.484796 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a756a228-0a62-4e5f-a4fe-f728972087c7","Type":"ContainerStarted","Data":"703fa8dfe901c2f159006f452a408255c8f9b6a2540bf4f865096359d34f887b"} Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.485961 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.487240 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a503b64c-abfe-4b3b-a501-ef9b4203e56d","Type":"ContainerStarted","Data":"115ccdfda7199c084d2724068ad10a474f0760f0146b8d2bc49093649e61d239"} Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.487514 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.517809 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.517565967 podStartE2EDuration="36.517565967s" podCreationTimestamp="2025-11-25 15:49:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:49:51.50944678 +0000 UTC m=+5083.112859861" watchObservedRunningTime="2025-11-25 15:49:51.517565967 +0000 UTC m=+5083.120979048" Nov 25 15:49:51 crc kubenswrapper[4879]: I1125 15:49:51.538324 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.538303784 podStartE2EDuration="36.538303784s" podCreationTimestamp="2025-11-25 15:49:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:49:51.53368289 +0000 UTC m=+5083.137095961" watchObservedRunningTime="2025-11-25 15:49:51.538303784 +0000 UTC m=+5083.141716855" Nov 25 15:50:02 crc kubenswrapper[4879]: I1125 15:50:02.644845 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:50:02 crc kubenswrapper[4879]: E1125 15:50:02.645591 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:50:05 crc kubenswrapper[4879]: I1125 15:50:05.930997 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 25 15:50:05 crc kubenswrapper[4879]: I1125 15:50:05.971081 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 25 15:50:13 crc 
kubenswrapper[4879]: I1125 15:50:13.644810 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:50:13 crc kubenswrapper[4879]: E1125 15:50:13.645630 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.120612 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Nov 25 15:50:17 crc kubenswrapper[4879]: E1125 15:50:17.121237 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="init" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.121250 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="init" Nov 25 15:50:17 crc kubenswrapper[4879]: E1125 15:50:17.121269 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="dnsmasq-dns" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.121276 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="dnsmasq-dns" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.121426 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea2ced60-4ddd-42db-a2c3-577cb848f906" containerName="dnsmasq-dns" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.121953 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.125429 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-h58mr" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.128843 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.182270 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkt85\" (UniqueName: \"kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85\") pod \"mariadb-client-1-default\" (UID: \"84301ea3-8db6-4eb8-9095-6cbb826e1df8\") " pod="openstack/mariadb-client-1-default" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.282812 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkt85\" (UniqueName: \"kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85\") pod \"mariadb-client-1-default\" (UID: \"84301ea3-8db6-4eb8-9095-6cbb826e1df8\") " pod="openstack/mariadb-client-1-default" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.306822 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkt85\" (UniqueName: \"kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85\") pod \"mariadb-client-1-default\" (UID: \"84301ea3-8db6-4eb8-9095-6cbb826e1df8\") " pod="openstack/mariadb-client-1-default" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.447971 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 25 15:50:17 crc kubenswrapper[4879]: I1125 15:50:17.944899 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 25 15:50:17 crc kubenswrapper[4879]: W1125 15:50:17.950788 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84301ea3_8db6_4eb8_9095_6cbb826e1df8.slice/crio-9dd2c4f1013029a23fbb39996869e94203306143e0dec6582d184e9880ff0ae3 WatchSource:0}: Error finding container 9dd2c4f1013029a23fbb39996869e94203306143e0dec6582d184e9880ff0ae3: Status 404 returned error can't find the container with id 9dd2c4f1013029a23fbb39996869e94203306143e0dec6582d184e9880ff0ae3 Nov 25 15:50:18 crc kubenswrapper[4879]: I1125 15:50:18.689335 4879 generic.go:334] "Generic (PLEG): container finished" podID="84301ea3-8db6-4eb8-9095-6cbb826e1df8" containerID="d6ac269f28956b59b9dba33643de9e19a6c2ace67ea67aa9256d847d2cadc5ee" exitCode=0 Nov 25 15:50:18 crc kubenswrapper[4879]: I1125 15:50:18.689402 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"84301ea3-8db6-4eb8-9095-6cbb826e1df8","Type":"ContainerDied","Data":"d6ac269f28956b59b9dba33643de9e19a6c2ace67ea67aa9256d847d2cadc5ee"} Nov 25 15:50:18 crc kubenswrapper[4879]: I1125 15:50:18.689459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"84301ea3-8db6-4eb8-9095-6cbb826e1df8","Type":"ContainerStarted","Data":"9dd2c4f1013029a23fbb39996869e94203306143e0dec6582d184e9880ff0ae3"} Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.020994 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.049322 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_84301ea3-8db6-4eb8-9095-6cbb826e1df8/mariadb-client-1-default/0.log" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.073803 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.081482 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.123208 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkt85\" (UniqueName: \"kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85\") pod \"84301ea3-8db6-4eb8-9095-6cbb826e1df8\" (UID: \"84301ea3-8db6-4eb8-9095-6cbb826e1df8\") " Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.129059 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85" (OuterVolumeSpecName: "kube-api-access-jkt85") pod "84301ea3-8db6-4eb8-9095-6cbb826e1df8" (UID: "84301ea3-8db6-4eb8-9095-6cbb826e1df8"). InnerVolumeSpecName "kube-api-access-jkt85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.224270 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkt85\" (UniqueName: \"kubernetes.io/projected/84301ea3-8db6-4eb8-9095-6cbb826e1df8-kube-api-access-jkt85\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.502069 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Nov 25 15:50:20 crc kubenswrapper[4879]: E1125 15:50:20.502547 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84301ea3-8db6-4eb8-9095-6cbb826e1df8" containerName="mariadb-client-1-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.502576 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="84301ea3-8db6-4eb8-9095-6cbb826e1df8" containerName="mariadb-client-1-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.502832 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="84301ea3-8db6-4eb8-9095-6cbb826e1df8" containerName="mariadb-client-1-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.503597 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.509699 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.629392 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w97cp\" (UniqueName: \"kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp\") pod \"mariadb-client-2-default\" (UID: \"97458cb8-bac3-4b2f-9a3a-055e99ad9457\") " pod="openstack/mariadb-client-2-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.705954 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dd2c4f1013029a23fbb39996869e94203306143e0dec6582d184e9880ff0ae3" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.706178 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.731553 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w97cp\" (UniqueName: \"kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp\") pod \"mariadb-client-2-default\" (UID: \"97458cb8-bac3-4b2f-9a3a-055e99ad9457\") " pod="openstack/mariadb-client-2-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.753285 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w97cp\" (UniqueName: \"kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp\") pod \"mariadb-client-2-default\" (UID: \"97458cb8-bac3-4b2f-9a3a-055e99ad9457\") " pod="openstack/mariadb-client-2-default" Nov 25 15:50:20 crc kubenswrapper[4879]: I1125 15:50:20.819823 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 25 15:50:21 crc kubenswrapper[4879]: I1125 15:50:21.323073 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 25 15:50:21 crc kubenswrapper[4879]: I1125 15:50:21.655822 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84301ea3-8db6-4eb8-9095-6cbb826e1df8" path="/var/lib/kubelet/pods/84301ea3-8db6-4eb8-9095-6cbb826e1df8/volumes" Nov 25 15:50:21 crc kubenswrapper[4879]: I1125 15:50:21.714861 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"97458cb8-bac3-4b2f-9a3a-055e99ad9457","Type":"ContainerStarted","Data":"bbb378f22aa963cee394575007fae43400be2512ab3b1790e49d7a27eaaade17"} Nov 25 15:50:21 crc kubenswrapper[4879]: I1125 15:50:21.714906 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"97458cb8-bac3-4b2f-9a3a-055e99ad9457","Type":"ContainerStarted","Data":"c27482cb7b5539c9d53b4f45b81be0003752064afdac4ce46162ff53721f64a1"} Nov 25 15:50:21 crc kubenswrapper[4879]: I1125 15:50:21.731771 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.731752542 podStartE2EDuration="1.731752542s" podCreationTimestamp="2025-11-25 15:50:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:50:21.726581873 +0000 UTC m=+5113.329994954" watchObservedRunningTime="2025-11-25 15:50:21.731752542 +0000 UTC m=+5113.335165603" Nov 25 15:50:22 crc kubenswrapper[4879]: I1125 15:50:22.724097 4879 generic.go:334] "Generic (PLEG): container finished" podID="97458cb8-bac3-4b2f-9a3a-055e99ad9457" containerID="bbb378f22aa963cee394575007fae43400be2512ab3b1790e49d7a27eaaade17" exitCode=1 Nov 25 15:50:22 crc kubenswrapper[4879]: I1125 15:50:22.724178 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"97458cb8-bac3-4b2f-9a3a-055e99ad9457","Type":"ContainerDied","Data":"bbb378f22aa963cee394575007fae43400be2512ab3b1790e49d7a27eaaade17"} Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.067360 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.081820 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w97cp\" (UniqueName: \"kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp\") pod \"97458cb8-bac3-4b2f-9a3a-055e99ad9457\" (UID: \"97458cb8-bac3-4b2f-9a3a-055e99ad9457\") " Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.088143 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp" (OuterVolumeSpecName: "kube-api-access-w97cp") pod "97458cb8-bac3-4b2f-9a3a-055e99ad9457" (UID: "97458cb8-bac3-4b2f-9a3a-055e99ad9457"). InnerVolumeSpecName "kube-api-access-w97cp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.115677 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.124904 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.183529 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w97cp\" (UniqueName: \"kubernetes.io/projected/97458cb8-bac3-4b2f-9a3a-055e99ad9457-kube-api-access-w97cp\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.501850 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Nov 25 15:50:24 crc kubenswrapper[4879]: E1125 15:50:24.502263 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97458cb8-bac3-4b2f-9a3a-055e99ad9457" containerName="mariadb-client-2-default" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.502290 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97458cb8-bac3-4b2f-9a3a-055e99ad9457" containerName="mariadb-client-2-default" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.502522 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="97458cb8-bac3-4b2f-9a3a-055e99ad9457" containerName="mariadb-client-2-default" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.503200 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.512932 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.590339 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tn52\" (UniqueName: \"kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52\") pod \"mariadb-client-1\" (UID: \"33a346d4-e62c-4390-bbd3-17ff770a8f7d\") " pod="openstack/mariadb-client-1" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.644447 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:50:24 crc kubenswrapper[4879]: E1125 15:50:24.644676 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.691887 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tn52\" (UniqueName: \"kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52\") pod \"mariadb-client-1\" (UID: \"33a346d4-e62c-4390-bbd3-17ff770a8f7d\") " pod="openstack/mariadb-client-1" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.710227 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tn52\" (UniqueName: \"kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52\") pod \"mariadb-client-1\" (UID: \"33a346d4-e62c-4390-bbd3-17ff770a8f7d\") " 
pod="openstack/mariadb-client-1" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.741839 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c27482cb7b5539c9d53b4f45b81be0003752064afdac4ce46162ff53721f64a1" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.741911 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 25 15:50:24 crc kubenswrapper[4879]: I1125 15:50:24.819754 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 25 15:50:25 crc kubenswrapper[4879]: I1125 15:50:25.301485 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 25 15:50:25 crc kubenswrapper[4879]: W1125 15:50:25.304291 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33a346d4_e62c_4390_bbd3_17ff770a8f7d.slice/crio-33984c92d0e11729ccb1d0d1af90e7ff11fce0311af01bffa570e4f74872f523 WatchSource:0}: Error finding container 33984c92d0e11729ccb1d0d1af90e7ff11fce0311af01bffa570e4f74872f523: Status 404 returned error can't find the container with id 33984c92d0e11729ccb1d0d1af90e7ff11fce0311af01bffa570e4f74872f523 Nov 25 15:50:25 crc kubenswrapper[4879]: I1125 15:50:25.654192 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97458cb8-bac3-4b2f-9a3a-055e99ad9457" path="/var/lib/kubelet/pods/97458cb8-bac3-4b2f-9a3a-055e99ad9457/volumes" Nov 25 15:50:25 crc kubenswrapper[4879]: I1125 15:50:25.752265 4879 generic.go:334] "Generic (PLEG): container finished" podID="33a346d4-e62c-4390-bbd3-17ff770a8f7d" containerID="f73b21b576e8adfeb8148416ea46e2a2abd657724a1af86e8d421bdf4f84840c" exitCode=0 Nov 25 15:50:25 crc kubenswrapper[4879]: I1125 15:50:25.752350 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"33a346d4-e62c-4390-bbd3-17ff770a8f7d","Type":"ContainerDied","Data":"f73b21b576e8adfeb8148416ea46e2a2abd657724a1af86e8d421bdf4f84840c"} Nov 25 15:50:25 crc kubenswrapper[4879]: I1125 15:50:25.752386 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"33a346d4-e62c-4390-bbd3-17ff770a8f7d","Type":"ContainerStarted","Data":"33984c92d0e11729ccb1d0d1af90e7ff11fce0311af01bffa570e4f74872f523"} Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.129949 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.150659 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_33a346d4-e62c-4390-bbd3-17ff770a8f7d/mariadb-client-1/0.log" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.173734 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.180688 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.332343 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tn52\" (UniqueName: \"kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52\") pod \"33a346d4-e62c-4390-bbd3-17ff770a8f7d\" (UID: \"33a346d4-e62c-4390-bbd3-17ff770a8f7d\") " Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.339260 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52" (OuterVolumeSpecName: "kube-api-access-9tn52") pod "33a346d4-e62c-4390-bbd3-17ff770a8f7d" (UID: "33a346d4-e62c-4390-bbd3-17ff770a8f7d"). InnerVolumeSpecName "kube-api-access-9tn52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.434572 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tn52\" (UniqueName: \"kubernetes.io/projected/33a346d4-e62c-4390-bbd3-17ff770a8f7d-kube-api-access-9tn52\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.579620 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Nov 25 15:50:27 crc kubenswrapper[4879]: E1125 15:50:27.579987 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a346d4-e62c-4390-bbd3-17ff770a8f7d" containerName="mariadb-client-1" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.580004 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a346d4-e62c-4390-bbd3-17ff770a8f7d" containerName="mariadb-client-1" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.580215 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a346d4-e62c-4390-bbd3-17ff770a8f7d" containerName="mariadb-client-1" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.580818 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.586743 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.656199 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33a346d4-e62c-4390-bbd3-17ff770a8f7d" path="/var/lib/kubelet/pods/33a346d4-e62c-4390-bbd3-17ff770a8f7d/volumes" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.739212 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lwvw\" (UniqueName: \"kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw\") pod \"mariadb-client-4-default\" (UID: \"e1e8c545-8145-48e8-80f3-347062144a2b\") " pod="openstack/mariadb-client-4-default" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.769397 4879 scope.go:117] "RemoveContainer" containerID="f73b21b576e8adfeb8148416ea46e2a2abd657724a1af86e8d421bdf4f84840c" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.769578 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.840768 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lwvw\" (UniqueName: \"kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw\") pod \"mariadb-client-4-default\" (UID: \"e1e8c545-8145-48e8-80f3-347062144a2b\") " pod="openstack/mariadb-client-4-default" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.862693 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lwvw\" (UniqueName: \"kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw\") pod \"mariadb-client-4-default\" (UID: \"e1e8c545-8145-48e8-80f3-347062144a2b\") " pod="openstack/mariadb-client-4-default" Nov 25 15:50:27 crc kubenswrapper[4879]: I1125 15:50:27.898096 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 25 15:50:28 crc kubenswrapper[4879]: I1125 15:50:28.188298 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 25 15:50:28 crc kubenswrapper[4879]: I1125 15:50:28.781486 4879 generic.go:334] "Generic (PLEG): container finished" podID="e1e8c545-8145-48e8-80f3-347062144a2b" containerID="d85f5857b32a24f5b93ca075b088dca8b2da5365e77c2786c37d773d3a6bd094" exitCode=0 Nov 25 15:50:28 crc kubenswrapper[4879]: I1125 15:50:28.781653 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"e1e8c545-8145-48e8-80f3-347062144a2b","Type":"ContainerDied","Data":"d85f5857b32a24f5b93ca075b088dca8b2da5365e77c2786c37d773d3a6bd094"} Nov 25 15:50:28 crc kubenswrapper[4879]: I1125 15:50:28.782010 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"e1e8c545-8145-48e8-80f3-347062144a2b","Type":"ContainerStarted","Data":"76648f68d318cdd5134b27939516035333b074a5b5ce744da7678b856e65eebe"} Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.125279 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.142498 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_e1e8c545-8145-48e8-80f3-347062144a2b/mariadb-client-4-default/0.log" Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.171680 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.180009 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.291800 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lwvw\" (UniqueName: \"kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw\") pod \"e1e8c545-8145-48e8-80f3-347062144a2b\" (UID: \"e1e8c545-8145-48e8-80f3-347062144a2b\") " Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.297445 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw" (OuterVolumeSpecName: "kube-api-access-5lwvw") pod "e1e8c545-8145-48e8-80f3-347062144a2b" (UID: "e1e8c545-8145-48e8-80f3-347062144a2b"). InnerVolumeSpecName "kube-api-access-5lwvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.393980 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lwvw\" (UniqueName: \"kubernetes.io/projected/e1e8c545-8145-48e8-80f3-347062144a2b-kube-api-access-5lwvw\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.800674 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76648f68d318cdd5134b27939516035333b074a5b5ce744da7678b856e65eebe" Nov 25 15:50:30 crc kubenswrapper[4879]: I1125 15:50:30.800760 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 25 15:50:31 crc kubenswrapper[4879]: I1125 15:50:31.654099 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1e8c545-8145-48e8-80f3-347062144a2b" path="/var/lib/kubelet/pods/e1e8c545-8145-48e8-80f3-347062144a2b/volumes" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.211363 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Nov 25 15:50:34 crc kubenswrapper[4879]: E1125 15:50:34.212364 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e8c545-8145-48e8-80f3-347062144a2b" containerName="mariadb-client-4-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.212380 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e8c545-8145-48e8-80f3-347062144a2b" containerName="mariadb-client-4-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.212533 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e8c545-8145-48e8-80f3-347062144a2b" containerName="mariadb-client-4-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.213280 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.220776 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-h58mr" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.225462 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.358521 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xqxn\" (UniqueName: \"kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn\") pod \"mariadb-client-5-default\" (UID: \"8cf1b048-d38d-4fdb-b9b9-3388510f7f10\") " pod="openstack/mariadb-client-5-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.459966 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xqxn\" (UniqueName: \"kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn\") pod \"mariadb-client-5-default\" (UID: \"8cf1b048-d38d-4fdb-b9b9-3388510f7f10\") " pod="openstack/mariadb-client-5-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.482789 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xqxn\" (UniqueName: \"kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn\") pod \"mariadb-client-5-default\" (UID: \"8cf1b048-d38d-4fdb-b9b9-3388510f7f10\") " pod="openstack/mariadb-client-5-default" Nov 25 15:50:34 crc kubenswrapper[4879]: I1125 15:50:34.543179 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 25 15:50:35 crc kubenswrapper[4879]: I1125 15:50:35.041282 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 25 15:50:35 crc kubenswrapper[4879]: I1125 15:50:35.644589 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:50:35 crc kubenswrapper[4879]: E1125 15:50:35.645159 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:50:35 crc kubenswrapper[4879]: I1125 15:50:35.839690 4879 generic.go:334] "Generic (PLEG): container finished" podID="8cf1b048-d38d-4fdb-b9b9-3388510f7f10" containerID="f637538d4ecc244050a6aef90819d15e374f4f557222f1dd763121bb30005866" exitCode=0 Nov 25 15:50:35 crc kubenswrapper[4879]: I1125 15:50:35.839733 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"8cf1b048-d38d-4fdb-b9b9-3388510f7f10","Type":"ContainerDied","Data":"f637538d4ecc244050a6aef90819d15e374f4f557222f1dd763121bb30005866"} Nov 25 15:50:35 crc kubenswrapper[4879]: I1125 15:50:35.839782 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"8cf1b048-d38d-4fdb-b9b9-3388510f7f10","Type":"ContainerStarted","Data":"bac34a6796b4b2d471cd20c2e41d8b45332b8c36ee5f76fb34f3f58c8aed4758"} Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.175490 4879 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.194096 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_8cf1b048-d38d-4fdb-b9b9-3388510f7f10/mariadb-client-5-default/0.log" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.220232 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.226007 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.300821 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xqxn\" (UniqueName: \"kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn\") pod \"8cf1b048-d38d-4fdb-b9b9-3388510f7f10\" (UID: \"8cf1b048-d38d-4fdb-b9b9-3388510f7f10\") " Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.306833 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn" (OuterVolumeSpecName: "kube-api-access-6xqxn") pod "8cf1b048-d38d-4fdb-b9b9-3388510f7f10" (UID: "8cf1b048-d38d-4fdb-b9b9-3388510f7f10"). InnerVolumeSpecName "kube-api-access-6xqxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.374769 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Nov 25 15:50:37 crc kubenswrapper[4879]: E1125 15:50:37.375348 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cf1b048-d38d-4fdb-b9b9-3388510f7f10" containerName="mariadb-client-5-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.375443 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cf1b048-d38d-4fdb-b9b9-3388510f7f10" containerName="mariadb-client-5-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.375729 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cf1b048-d38d-4fdb-b9b9-3388510f7f10" containerName="mariadb-client-5-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.376317 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.383365 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.403056 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xqxn\" (UniqueName: \"kubernetes.io/projected/8cf1b048-d38d-4fdb-b9b9-3388510f7f10-kube-api-access-6xqxn\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.504819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr9xj\" (UniqueName: \"kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj\") pod \"mariadb-client-6-default\" (UID: \"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3\") " pod="openstack/mariadb-client-6-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.606670 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr9xj\" (UniqueName: \"kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj\") pod \"mariadb-client-6-default\" (UID: \"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3\") " pod="openstack/mariadb-client-6-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.623864 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr9xj\" (UniqueName: \"kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj\") pod \"mariadb-client-6-default\" (UID: \"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3\") " pod="openstack/mariadb-client-6-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.655054 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cf1b048-d38d-4fdb-b9b9-3388510f7f10" path="/var/lib/kubelet/pods/8cf1b048-d38d-4fdb-b9b9-3388510f7f10/volumes" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.692415 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.866589 4879 scope.go:117] "RemoveContainer" containerID="f637538d4ecc244050a6aef90819d15e374f4f557222f1dd763121bb30005866" Nov 25 15:50:37 crc kubenswrapper[4879]: I1125 15:50:37.867354 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 25 15:50:38 crc kubenswrapper[4879]: I1125 15:50:38.181646 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 25 15:50:38 crc kubenswrapper[4879]: W1125 15:50:38.190302 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90d5fe2b_35bd_4adc_8fa3_fd27c82856c3.slice/crio-bb8a9541dc67c1b53fdc6ec96b4954fac373bc6e49556d7f4b4607f7ae7b67b9 WatchSource:0}: Error finding container bb8a9541dc67c1b53fdc6ec96b4954fac373bc6e49556d7f4b4607f7ae7b67b9: Status 404 returned error can't find the container with id bb8a9541dc67c1b53fdc6ec96b4954fac373bc6e49556d7f4b4607f7ae7b67b9 Nov 25 15:50:38 crc kubenswrapper[4879]: I1125 15:50:38.877535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3","Type":"ContainerStarted","Data":"3f8e1164c2f52c981b81464636833b3f6f360a27843a150a24f6920b54110033"} Nov 25 15:50:38 crc kubenswrapper[4879]: I1125 15:50:38.878056 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3","Type":"ContainerStarted","Data":"bb8a9541dc67c1b53fdc6ec96b4954fac373bc6e49556d7f4b4607f7ae7b67b9"} Nov 25 15:50:38 crc kubenswrapper[4879]: I1125 15:50:38.893019 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.8930001939999999 podStartE2EDuration="1.893000194s" podCreationTimestamp="2025-11-25 15:50:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:50:38.889548092 +0000 UTC m=+5130.492961163" watchObservedRunningTime="2025-11-25 15:50:38.893000194 +0000 UTC m=+5130.496413265" Nov 25 15:50:38 crc kubenswrapper[4879]: I1125 15:50:38.938289 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_90d5fe2b-35bd-4adc-8fa3-fd27c82856c3/mariadb-client-6-default/0.log" Nov 25 15:50:39 crc kubenswrapper[4879]: I1125 15:50:39.888886 4879 generic.go:334] "Generic (PLEG): container finished" podID="90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" containerID="3f8e1164c2f52c981b81464636833b3f6f360a27843a150a24f6920b54110033" exitCode=1 Nov 25 15:50:39 crc kubenswrapper[4879]: I1125 15:50:39.889929 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3","Type":"ContainerDied","Data":"3f8e1164c2f52c981b81464636833b3f6f360a27843a150a24f6920b54110033"} Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.225886 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.257692 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.263344 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.366373 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr9xj\" (UniqueName: \"kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj\") pod \"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3\" (UID: \"90d5fe2b-35bd-4adc-8fa3-fd27c82856c3\") " Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.371684 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj" (OuterVolumeSpecName: "kube-api-access-jr9xj") pod "90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" (UID: "90d5fe2b-35bd-4adc-8fa3-fd27c82856c3"). InnerVolumeSpecName "kube-api-access-jr9xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.439791 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Nov 25 15:50:41 crc kubenswrapper[4879]: E1125 15:50:41.440195 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" containerName="mariadb-client-6-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.440218 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" containerName="mariadb-client-6-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.440408 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" containerName="mariadb-client-6-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.441033 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.449574 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.468444 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr9xj\" (UniqueName: \"kubernetes.io/projected/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3-kube-api-access-jr9xj\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.570085 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2mhz\" (UniqueName: \"kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz\") pod \"mariadb-client-7-default\" (UID: \"97a2c728-a5d3-4948-818c-a937b855c966\") " pod="openstack/mariadb-client-7-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.654452 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90d5fe2b-35bd-4adc-8fa3-fd27c82856c3" path="/var/lib/kubelet/pods/90d5fe2b-35bd-4adc-8fa3-fd27c82856c3/volumes" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.672113 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2mhz\" (UniqueName: \"kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz\") pod \"mariadb-client-7-default\" (UID: \"97a2c728-a5d3-4948-818c-a937b855c966\") " pod="openstack/mariadb-client-7-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.690869 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2mhz\" (UniqueName: \"kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz\") pod \"mariadb-client-7-default\" (UID: \"97a2c728-a5d3-4948-818c-a937b855c966\") " pod="openstack/mariadb-client-7-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.758724 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.905048 4879 scope.go:117] "RemoveContainer" containerID="3f8e1164c2f52c981b81464636833b3f6f360a27843a150a24f6920b54110033" Nov 25 15:50:41 crc kubenswrapper[4879]: I1125 15:50:41.905466 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 25 15:50:42 crc kubenswrapper[4879]: I1125 15:50:42.246658 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 25 15:50:42 crc kubenswrapper[4879]: I1125 15:50:42.918272 4879 generic.go:334] "Generic (PLEG): container finished" podID="97a2c728-a5d3-4948-818c-a937b855c966" containerID="5f4ab9268de1f5c2a6631e232fd55bd422a193069f41d1f8dce35c3ae1dc8403" exitCode=0 Nov 25 15:50:42 crc kubenswrapper[4879]: I1125 15:50:42.918468 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"97a2c728-a5d3-4948-818c-a937b855c966","Type":"ContainerDied","Data":"5f4ab9268de1f5c2a6631e232fd55bd422a193069f41d1f8dce35c3ae1dc8403"} Nov 25 15:50:42 crc kubenswrapper[4879]: I1125 15:50:42.918579 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"97a2c728-a5d3-4948-818c-a937b855c966","Type":"ContainerStarted","Data":"c815640a87320705e3f43babc0a883c9cab317201e4bf8f873183277268a6c66"} Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.256746 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.273614 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_97a2c728-a5d3-4948-818c-a937b855c966/mariadb-client-7-default/0.log" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.301738 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.307851 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.416582 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2mhz\" (UniqueName: \"kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz\") pod \"97a2c728-a5d3-4948-818c-a937b855c966\" (UID: \"97a2c728-a5d3-4948-818c-a937b855c966\") " Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.421197 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz" (OuterVolumeSpecName: "kube-api-access-z2mhz") pod "97a2c728-a5d3-4948-818c-a937b855c966" (UID: "97a2c728-a5d3-4948-818c-a937b855c966"). InnerVolumeSpecName "kube-api-access-z2mhz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.441638 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Nov 25 15:50:44 crc kubenswrapper[4879]: E1125 15:50:44.442011 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97a2c728-a5d3-4948-818c-a937b855c966" containerName="mariadb-client-7-default" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.442096 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a2c728-a5d3-4948-818c-a937b855c966" containerName="mariadb-client-7-default" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.442391 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a2c728-a5d3-4948-818c-a937b855c966" containerName="mariadb-client-7-default" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.442988 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.450085 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.518770 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2mhz\" (UniqueName: \"kubernetes.io/projected/97a2c728-a5d3-4948-818c-a937b855c966-kube-api-access-z2mhz\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.619738 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swg5v\" (UniqueName: \"kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v\") pod \"mariadb-client-2\" (UID: \"537cdefc-8899-4bb5-b5af-d97ff3b239fd\") " pod="openstack/mariadb-client-2" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.721671 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swg5v\" (UniqueName: \"kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v\") pod \"mariadb-client-2\" (UID: \"537cdefc-8899-4bb5-b5af-d97ff3b239fd\") " pod="openstack/mariadb-client-2" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.740010 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swg5v\" (UniqueName: \"kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v\") pod \"mariadb-client-2\" (UID: \"537cdefc-8899-4bb5-b5af-d97ff3b239fd\") " pod="openstack/mariadb-client-2" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.771979 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.938464 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c815640a87320705e3f43babc0a883c9cab317201e4bf8f873183277268a6c66" Nov 25 15:50:44 crc kubenswrapper[4879]: I1125 15:50:44.938509 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 25 15:50:45 crc kubenswrapper[4879]: I1125 15:50:45.235941 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 25 15:50:45 crc kubenswrapper[4879]: W1125 15:50:45.240173 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod537cdefc_8899_4bb5_b5af_d97ff3b239fd.slice/crio-d1668e97d2856184664c7e5099b1c87fa589d5c46f9504fd67180482cf63449b WatchSource:0}: Error finding container d1668e97d2856184664c7e5099b1c87fa589d5c46f9504fd67180482cf63449b: Status 404 returned error can't find the container with id d1668e97d2856184664c7e5099b1c87fa589d5c46f9504fd67180482cf63449b Nov 25 15:50:45 crc kubenswrapper[4879]: I1125 15:50:45.653846 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97a2c728-a5d3-4948-818c-a937b855c966" path="/var/lib/kubelet/pods/97a2c728-a5d3-4948-818c-a937b855c966/volumes" Nov 25 15:50:45 crc kubenswrapper[4879]: I1125 15:50:45.951117 4879 generic.go:334] "Generic (PLEG): container finished" podID="537cdefc-8899-4bb5-b5af-d97ff3b239fd" containerID="f6a4caed0cd1e60d5f56d720de47316eb601cd9472da608272e9cfd82b77dde8" exitCode=0 Nov 25 15:50:45 crc kubenswrapper[4879]: I1125 15:50:45.951213 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"537cdefc-8899-4bb5-b5af-d97ff3b239fd","Type":"ContainerDied","Data":"f6a4caed0cd1e60d5f56d720de47316eb601cd9472da608272e9cfd82b77dde8"} Nov 25 15:50:45 crc kubenswrapper[4879]: I1125 15:50:45.951540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"537cdefc-8899-4bb5-b5af-d97ff3b239fd","Type":"ContainerStarted","Data":"d1668e97d2856184664c7e5099b1c87fa589d5c46f9504fd67180482cf63449b"} Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.261986 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.279221 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_537cdefc-8899-4bb5-b5af-d97ff3b239fd/mariadb-client-2/0.log" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.306148 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.311819 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.358471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swg5v\" (UniqueName: \"kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v\") pod \"537cdefc-8899-4bb5-b5af-d97ff3b239fd\" (UID: \"537cdefc-8899-4bb5-b5af-d97ff3b239fd\") " Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.363267 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v" (OuterVolumeSpecName: "kube-api-access-swg5v") pod "537cdefc-8899-4bb5-b5af-d97ff3b239fd" (UID: "537cdefc-8899-4bb5-b5af-d97ff3b239fd"). InnerVolumeSpecName "kube-api-access-swg5v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.461404 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swg5v\" (UniqueName: \"kubernetes.io/projected/537cdefc-8899-4bb5-b5af-d97ff3b239fd-kube-api-access-swg5v\") on node \"crc\" DevicePath \"\"" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.644737 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:50:47 crc kubenswrapper[4879]: E1125 15:50:47.645092 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.652498 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="537cdefc-8899-4bb5-b5af-d97ff3b239fd" path="/var/lib/kubelet/pods/537cdefc-8899-4bb5-b5af-d97ff3b239fd/volumes" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.975757 4879 scope.go:117] "RemoveContainer" containerID="f6a4caed0cd1e60d5f56d720de47316eb601cd9472da608272e9cfd82b77dde8" Nov 25 15:50:47 crc kubenswrapper[4879]: I1125 15:50:47.975807 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 25 15:51:00 crc kubenswrapper[4879]: I1125 15:51:00.644389 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:51:00 crc kubenswrapper[4879]: E1125 15:51:00.645199 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:51:15 crc kubenswrapper[4879]: I1125 15:51:15.645165 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:51:15 crc kubenswrapper[4879]: E1125 15:51:15.646564 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.575666 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:24 crc kubenswrapper[4879]: E1125 15:51:24.576496 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537cdefc-8899-4bb5-b5af-d97ff3b239fd" containerName="mariadb-client-2" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.576509 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="537cdefc-8899-4bb5-b5af-d97ff3b239fd" containerName="mariadb-client-2" Nov 25 15:51:24 crc 
kubenswrapper[4879]: I1125 15:51:24.576655 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="537cdefc-8899-4bb5-b5af-d97ff3b239fd" containerName="mariadb-client-2" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.577782 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.590600 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.739032 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.739390 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnclp\" (UniqueName: \"kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.739615 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.840993 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.841205 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.841257 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnclp\" (UniqueName: \"kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.841599 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.841668 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.862771 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnclp\" (UniqueName: \"kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp\") pod \"community-operators-6g72n\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:24 crc kubenswrapper[4879]: I1125 15:51:24.901655 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:25 crc kubenswrapper[4879]: I1125 15:51:25.385625 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:26 crc kubenswrapper[4879]: I1125 15:51:26.302016 4879 generic.go:334] "Generic (PLEG): container finished" podID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerID="e161ab1482613c5866a40e5bbce6b7c02dc3f73375417d114c075aaae9f53145" exitCode=0 Nov 25 15:51:26 crc kubenswrapper[4879]: I1125 15:51:26.302149 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerDied","Data":"e161ab1482613c5866a40e5bbce6b7c02dc3f73375417d114c075aaae9f53145"} Nov 25 15:51:26 crc kubenswrapper[4879]: I1125 15:51:26.303635 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerStarted","Data":"6a95decc13d0e30d7a0d9c9c6cee944c73fdb3aa300f71ca8392541bb518ed02"} Nov 25 15:51:26 crc kubenswrapper[4879]: I1125 15:51:26.304233 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:51:27 crc kubenswrapper[4879]: I1125 15:51:27.312415 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerStarted","Data":"bd183c1a985e782c086acdd5e4e4e0166b35e8435f7f57b5aa20f6de43275a5f"} Nov 25 15:51:27 crc kubenswrapper[4879]: I1125 15:51:27.645293 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:51:27 crc kubenswrapper[4879]: E1125 15:51:27.646197 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:51:28 crc kubenswrapper[4879]: I1125 15:51:28.321466 4879 generic.go:334] "Generic (PLEG): container finished" podID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerID="bd183c1a985e782c086acdd5e4e4e0166b35e8435f7f57b5aa20f6de43275a5f" exitCode=0 Nov 25 15:51:28 crc kubenswrapper[4879]: I1125 15:51:28.321528 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" 
event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerDied","Data":"bd183c1a985e782c086acdd5e4e4e0166b35e8435f7f57b5aa20f6de43275a5f"} Nov 25 15:51:29 crc kubenswrapper[4879]: I1125 15:51:29.331459 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerStarted","Data":"1aa87671cba587a59cf7cc4959d7052637099b6027536c419bb483065afc90ab"} Nov 25 15:51:29 crc kubenswrapper[4879]: I1125 15:51:29.352458 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6g72n" podStartSLOduration=2.743967572 podStartE2EDuration="5.352441711s" podCreationTimestamp="2025-11-25 15:51:24 +0000 UTC" firstStartedPulling="2025-11-25 15:51:26.303917029 +0000 UTC m=+5177.907330100" lastFinishedPulling="2025-11-25 15:51:28.912391168 +0000 UTC m=+5180.515804239" observedRunningTime="2025-11-25 15:51:29.348687021 +0000 UTC m=+5180.952100122" watchObservedRunningTime="2025-11-25 15:51:29.352441711 +0000 UTC m=+5180.955854782" Nov 25 15:51:34 crc kubenswrapper[4879]: I1125 15:51:34.902226 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:34 crc kubenswrapper[4879]: I1125 15:51:34.903789 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:34 crc kubenswrapper[4879]: I1125 15:51:34.954731 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:35 crc kubenswrapper[4879]: I1125 15:51:35.425034 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:35 crc kubenswrapper[4879]: I1125 15:51:35.478177 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:37 crc kubenswrapper[4879]: I1125 15:51:37.395199 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6g72n" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="registry-server" containerID="cri-o://1aa87671cba587a59cf7cc4959d7052637099b6027536c419bb483065afc90ab" gracePeriod=2 Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.407859 4879 generic.go:334] "Generic (PLEG): container finished" podID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerID="1aa87671cba587a59cf7cc4959d7052637099b6027536c419bb483065afc90ab" exitCode=0 Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.408138 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerDied","Data":"1aa87671cba587a59cf7cc4959d7052637099b6027536c419bb483065afc90ab"} Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.626942 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.651378 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:51:38 crc kubenswrapper[4879]: E1125 15:51:38.651636 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.765724 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnclp\" (UniqueName: \"kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp\") pod \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.765852 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content\") pod \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.765954 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities\") pod \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\" (UID: \"8b07fe4a-22d0-434f-bb69-85a7ed09785e\") " Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.767536 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities" (OuterVolumeSpecName: "utilities") pod "8b07fe4a-22d0-434f-bb69-85a7ed09785e" (UID: "8b07fe4a-22d0-434f-bb69-85a7ed09785e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.771762 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp" (OuterVolumeSpecName: "kube-api-access-mnclp") pod "8b07fe4a-22d0-434f-bb69-85a7ed09785e" (UID: "8b07fe4a-22d0-434f-bb69-85a7ed09785e"). InnerVolumeSpecName "kube-api-access-mnclp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.812497 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b07fe4a-22d0-434f-bb69-85a7ed09785e" (UID: "8b07fe4a-22d0-434f-bb69-85a7ed09785e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.867898 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.867934 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnclp\" (UniqueName: \"kubernetes.io/projected/8b07fe4a-22d0-434f-bb69-85a7ed09785e-kube-api-access-mnclp\") on node \"crc\" DevicePath \"\"" Nov 25 15:51:38 crc kubenswrapper[4879]: I1125 15:51:38.867949 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b07fe4a-22d0-434f-bb69-85a7ed09785e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.418188 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6g72n" event={"ID":"8b07fe4a-22d0-434f-bb69-85a7ed09785e","Type":"ContainerDied","Data":"6a95decc13d0e30d7a0d9c9c6cee944c73fdb3aa300f71ca8392541bb518ed02"} Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.418246 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6g72n" Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.418251 4879 scope.go:117] "RemoveContainer" containerID="1aa87671cba587a59cf7cc4959d7052637099b6027536c419bb483065afc90ab" Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.446582 4879 scope.go:117] "RemoveContainer" containerID="bd183c1a985e782c086acdd5e4e4e0166b35e8435f7f57b5aa20f6de43275a5f" Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.446830 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.453036 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6g72n"] Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.466179 4879 scope.go:117] "RemoveContainer" containerID="e161ab1482613c5866a40e5bbce6b7c02dc3f73375417d114c075aaae9f53145" Nov 25 15:51:39 crc kubenswrapper[4879]: I1125 15:51:39.654567 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" path="/var/lib/kubelet/pods/8b07fe4a-22d0-434f-bb69-85a7ed09785e/volumes" Nov 25 15:51:42 crc kubenswrapper[4879]: I1125 15:51:42.808900 4879 scope.go:117] "RemoveContainer" containerID="bc1421b1c2558e58ff1ba0c8426057af995da9433e47e1faa2d66169dda8e680" Nov 25 15:51:52 crc kubenswrapper[4879]: I1125 15:51:52.645268 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:51:52 crc kubenswrapper[4879]: E1125 15:51:52.646059 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:52:03 crc kubenswrapper[4879]: I1125 15:52:03.645693 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 
25 15:52:03 crc kubenswrapper[4879]: E1125 15:52:03.646623 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:52:17 crc kubenswrapper[4879]: I1125 15:52:17.645315 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:52:17 crc kubenswrapper[4879]: E1125 15:52:17.646065 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:52:31 crc kubenswrapper[4879]: I1125 15:52:31.644879 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:52:31 crc kubenswrapper[4879]: E1125 15:52:31.645974 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:52:43 crc kubenswrapper[4879]: I1125 15:52:43.644723 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:52:43 crc kubenswrapper[4879]: E1125 15:52:43.645486 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:52:57 crc kubenswrapper[4879]: I1125 15:52:57.644756 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:52:57 crc kubenswrapper[4879]: E1125 15:52:57.645621 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:53:12 crc kubenswrapper[4879]: I1125 15:53:12.645025 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:53:12 crc kubenswrapper[4879]: E1125 15:53:12.645654 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:53:25 crc kubenswrapper[4879]: I1125 15:53:25.645361 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:53:25 crc kubenswrapper[4879]: E1125 15:53:25.646237 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.535386 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:32 crc kubenswrapper[4879]: E1125 15:53:32.536301 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="extract-content" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.536317 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="extract-content" Nov 25 15:53:32 crc kubenswrapper[4879]: E1125 15:53:32.536339 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="registry-server" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.536344 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="registry-server" Nov 25 15:53:32 crc kubenswrapper[4879]: E1125 15:53:32.536374 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="extract-utilities" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.536380 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="extract-utilities" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.536554 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b07fe4a-22d0-434f-bb69-85a7ed09785e" containerName="registry-server" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.538249 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.551742 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.692339 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.692406 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.692614 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52c9z\" (UniqueName: \"kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.793826 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.793885 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52c9z\" (UniqueName: \"kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.793969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.794468 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.794487 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.838722 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-52c9z\" (UniqueName: \"kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z\") pod \"redhat-marketplace-p7tj8\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:32 crc kubenswrapper[4879]: I1125 15:53:32.858369 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:33 crc kubenswrapper[4879]: I1125 15:53:33.278865 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:33 crc kubenswrapper[4879]: I1125 15:53:33.313932 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerStarted","Data":"4b02058715fa0e816eee14c8f62a64f41ccedb350ecdc7ad2c854a079dcf256e"} Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.322346 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerID="7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d" exitCode=0 Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.322456 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerDied","Data":"7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d"} Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.739174 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.741627 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.758208 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.829934 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk4nd\" (UniqueName: \"kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.830004 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.830054 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.931399 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk4nd\" (UniqueName: \"kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.931672 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.931765 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.932271 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.932545 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:34 crc kubenswrapper[4879]: I1125 15:53:34.953323 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pk4nd\" (UniqueName: \"kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd\") pod \"redhat-operators-9zrd6\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:35 crc kubenswrapper[4879]: I1125 15:53:35.068517 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:35 crc kubenswrapper[4879]: I1125 15:53:35.504699 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:35 crc kubenswrapper[4879]: W1125 15:53:35.508426 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda091ade_ee09_42fb_90c2_5307cce2401c.slice/crio-c168f4aedd69cb18cf3ffb9051bd6a7100da88cbd2b242294bbac20338ae16a3 WatchSource:0}: Error finding container c168f4aedd69cb18cf3ffb9051bd6a7100da88cbd2b242294bbac20338ae16a3: Status 404 returned error can't find the container with id c168f4aedd69cb18cf3ffb9051bd6a7100da88cbd2b242294bbac20338ae16a3 Nov 25 15:53:36 crc kubenswrapper[4879]: I1125 15:53:36.338215 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerID="434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f" exitCode=0 Nov 25 15:53:36 crc kubenswrapper[4879]: I1125 15:53:36.338277 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerDied","Data":"434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f"} Nov 25 15:53:36 crc kubenswrapper[4879]: I1125 15:53:36.339698 4879 generic.go:334] "Generic (PLEG): container finished" podID="da091ade-ee09-42fb-90c2-5307cce2401c" containerID="a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092" exitCode=0 Nov 25 15:53:36 crc kubenswrapper[4879]: I1125 15:53:36.339722 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerDied","Data":"a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092"} Nov 25 15:53:36 crc kubenswrapper[4879]: I1125 15:53:36.339755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerStarted","Data":"c168f4aedd69cb18cf3ffb9051bd6a7100da88cbd2b242294bbac20338ae16a3"} Nov 25 15:53:37 crc kubenswrapper[4879]: I1125 15:53:37.349617 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerStarted","Data":"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35"} Nov 25 15:53:37 crc kubenswrapper[4879]: I1125 15:53:37.368781 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p7tj8" podStartSLOduration=2.811057868 podStartE2EDuration="5.368763174s" podCreationTimestamp="2025-11-25 15:53:32 +0000 UTC" firstStartedPulling="2025-11-25 15:53:34.324477346 +0000 UTC m=+5305.927890417" lastFinishedPulling="2025-11-25 15:53:36.882182652 +0000 UTC m=+5308.485595723" observedRunningTime="2025-11-25 15:53:37.366931106 +0000 UTC m=+5308.970344197" watchObservedRunningTime="2025-11-25 
15:53:37.368763174 +0000 UTC m=+5308.972176255" Nov 25 15:53:38 crc kubenswrapper[4879]: I1125 15:53:38.359842 4879 generic.go:334] "Generic (PLEG): container finished" podID="da091ade-ee09-42fb-90c2-5307cce2401c" containerID="b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240" exitCode=0 Nov 25 15:53:38 crc kubenswrapper[4879]: I1125 15:53:38.359929 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerDied","Data":"b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240"} Nov 25 15:53:40 crc kubenswrapper[4879]: I1125 15:53:40.376085 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerStarted","Data":"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd"} Nov 25 15:53:40 crc kubenswrapper[4879]: I1125 15:53:40.395015 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9zrd6" podStartSLOduration=2.995256987 podStartE2EDuration="6.394992038s" podCreationTimestamp="2025-11-25 15:53:34 +0000 UTC" firstStartedPulling="2025-11-25 15:53:36.340784877 +0000 UTC m=+5307.944197948" lastFinishedPulling="2025-11-25 15:53:39.740519918 +0000 UTC m=+5311.343932999" observedRunningTime="2025-11-25 15:53:40.389941293 +0000 UTC m=+5311.993354364" watchObservedRunningTime="2025-11-25 15:53:40.394992038 +0000 UTC m=+5311.998405109" Nov 25 15:53:40 crc kubenswrapper[4879]: I1125 15:53:40.644519 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:53:40 crc kubenswrapper[4879]: E1125 15:53:40.644745 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:53:42 crc kubenswrapper[4879]: I1125 15:53:42.859047 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:42 crc kubenswrapper[4879]: I1125 15:53:42.859326 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:42 crc kubenswrapper[4879]: I1125 15:53:42.904657 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:43 crc kubenswrapper[4879]: I1125 15:53:43.443816 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:44 crc kubenswrapper[4879]: I1125 15:53:44.730018 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.069099 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.069225 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 
25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.117058 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.418988 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p7tj8" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="registry-server" containerID="cri-o://e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35" gracePeriod=2 Nov 25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.461476 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:45 crc kubenswrapper[4879]: I1125 15:53:45.864372 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.014398 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content\") pod \"6c5aee5c-dc96-4c68-8980-f903915f352e\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.014518 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52c9z\" (UniqueName: \"kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z\") pod \"6c5aee5c-dc96-4c68-8980-f903915f352e\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.014548 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities\") pod \"6c5aee5c-dc96-4c68-8980-f903915f352e\" (UID: \"6c5aee5c-dc96-4c68-8980-f903915f352e\") " Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.015675 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities" (OuterVolumeSpecName: "utilities") pod "6c5aee5c-dc96-4c68-8980-f903915f352e" (UID: "6c5aee5c-dc96-4c68-8980-f903915f352e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.020512 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z" (OuterVolumeSpecName: "kube-api-access-52c9z") pod "6c5aee5c-dc96-4c68-8980-f903915f352e" (UID: "6c5aee5c-dc96-4c68-8980-f903915f352e"). InnerVolumeSpecName "kube-api-access-52c9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.038599 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c5aee5c-dc96-4c68-8980-f903915f352e" (UID: "6c5aee5c-dc96-4c68-8980-f903915f352e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.116247 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52c9z\" (UniqueName: \"kubernetes.io/projected/6c5aee5c-dc96-4c68-8980-f903915f352e-kube-api-access-52c9z\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.116288 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.116299 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5aee5c-dc96-4c68-8980-f903915f352e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.428625 4879 generic.go:334] "Generic (PLEG): container finished" podID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerID="e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35" exitCode=0 Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.428764 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerDied","Data":"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35"} Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.428842 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7tj8" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.429234 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7tj8" event={"ID":"6c5aee5c-dc96-4c68-8980-f903915f352e","Type":"ContainerDied","Data":"4b02058715fa0e816eee14c8f62a64f41ccedb350ecdc7ad2c854a079dcf256e"} Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.429350 4879 scope.go:117] "RemoveContainer" containerID="e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.446620 4879 scope.go:117] "RemoveContainer" containerID="434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.466961 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.476108 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7tj8"] Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.481060 4879 scope.go:117] "RemoveContainer" containerID="7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.499530 4879 scope.go:117] "RemoveContainer" containerID="e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35" Nov 25 15:53:46 crc kubenswrapper[4879]: E1125 15:53:46.500009 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35\": container with ID starting with e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35 not found: ID does not exist" containerID="e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.500066 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35"} err="failed to get container status \"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35\": rpc error: code = NotFound desc = could not find container \"e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35\": container with ID starting with e62b5b3f0de61ac0be90a4f4750d8ab43dc84489121e7f5a55b0492851f9cd35 not found: ID does not exist" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.500102 4879 scope.go:117] "RemoveContainer" containerID="434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f" Nov 25 15:53:46 crc kubenswrapper[4879]: E1125 15:53:46.500551 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f\": container with ID starting with 434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f not found: ID does not exist" containerID="434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.500588 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f"} err="failed to get container status \"434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f\": rpc error: code = NotFound desc = could not find container \"434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f\": container with ID starting with 434c2da8c70e8197db045de360a519ea4ae6286d1570c7a7db6e911c15d5506f not found: ID does not exist" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.500617 4879 scope.go:117] "RemoveContainer" containerID="7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d" Nov 25 15:53:46 crc kubenswrapper[4879]: E1125 15:53:46.500901 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d\": container with ID starting with 7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d not found: ID does not exist" containerID="7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d" Nov 25 15:53:46 crc kubenswrapper[4879]: I1125 15:53:46.501007 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d"} err="failed to get container status \"7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d\": rpc error: code = NotFound desc = could not find container \"7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d\": container with ID starting with 7df256ab2eb54846d513ceac518f950f3c64debb26f654f42fff932a5112877d not found: ID does not exist" Nov 25 15:53:47 crc kubenswrapper[4879]: I1125 15:53:47.528939 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:47 crc kubenswrapper[4879]: I1125 15:53:47.529372 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9zrd6" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="registry-server" containerID="cri-o://53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd" gracePeriod=2 Nov 25 15:53:47 
crc kubenswrapper[4879]: I1125 15:53:47.655251 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" path="/var/lib/kubelet/pods/6c5aee5c-dc96-4c68-8980-f903915f352e/volumes" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.010048 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.146448 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk4nd\" (UniqueName: \"kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd\") pod \"da091ade-ee09-42fb-90c2-5307cce2401c\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.146511 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content\") pod \"da091ade-ee09-42fb-90c2-5307cce2401c\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.146593 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities\") pod \"da091ade-ee09-42fb-90c2-5307cce2401c\" (UID: \"da091ade-ee09-42fb-90c2-5307cce2401c\") " Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.147684 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities" (OuterVolumeSpecName: "utilities") pod "da091ade-ee09-42fb-90c2-5307cce2401c" (UID: "da091ade-ee09-42fb-90c2-5307cce2401c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.151923 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd" (OuterVolumeSpecName: "kube-api-access-pk4nd") pod "da091ade-ee09-42fb-90c2-5307cce2401c" (UID: "da091ade-ee09-42fb-90c2-5307cce2401c"). InnerVolumeSpecName "kube-api-access-pk4nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.248463 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk4nd\" (UniqueName: \"kubernetes.io/projected/da091ade-ee09-42fb-90c2-5307cce2401c-kube-api-access-pk4nd\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.248499 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.285500 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da091ade-ee09-42fb-90c2-5307cce2401c" (UID: "da091ade-ee09-42fb-90c2-5307cce2401c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.349918 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da091ade-ee09-42fb-90c2-5307cce2401c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.449642 4879 generic.go:334] "Generic (PLEG): container finished" podID="da091ade-ee09-42fb-90c2-5307cce2401c" containerID="53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd" exitCode=0 Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.449698 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerDied","Data":"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd"} Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.449705 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9zrd6" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.449731 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9zrd6" event={"ID":"da091ade-ee09-42fb-90c2-5307cce2401c","Type":"ContainerDied","Data":"c168f4aedd69cb18cf3ffb9051bd6a7100da88cbd2b242294bbac20338ae16a3"} Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.449750 4879 scope.go:117] "RemoveContainer" containerID="53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.478686 4879 scope.go:117] "RemoveContainer" containerID="b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.488016 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.496912 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9zrd6"] Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.510069 4879 scope.go:117] "RemoveContainer" containerID="a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.528476 4879 scope.go:117] "RemoveContainer" containerID="53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd" Nov 25 15:53:48 crc kubenswrapper[4879]: E1125 15:53:48.529043 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd\": container with ID starting with 53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd not found: ID does not exist" containerID="53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.529077 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd"} err="failed to get container status \"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd\": rpc error: code = NotFound desc = could not find container \"53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd\": container with ID starting with 53895cccd067d2633d586d998706c309451ceb1b4b879bc18789e72e832016fd not found: ID does not exist" Nov 25 15:53:48 crc 
kubenswrapper[4879]: I1125 15:53:48.529116 4879 scope.go:117] "RemoveContainer" containerID="b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240" Nov 25 15:53:48 crc kubenswrapper[4879]: E1125 15:53:48.529503 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240\": container with ID starting with b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240 not found: ID does not exist" containerID="b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.529531 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240"} err="failed to get container status \"b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240\": rpc error: code = NotFound desc = could not find container \"b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240\": container with ID starting with b1167a016aa3b24bc59ad739efe9f1e0f47fa7c24c6915500f7c9e252dd6a240 not found: ID does not exist" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.529550 4879 scope.go:117] "RemoveContainer" containerID="a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092" Nov 25 15:53:48 crc kubenswrapper[4879]: E1125 15:53:48.529882 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092\": container with ID starting with a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092 not found: ID does not exist" containerID="a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092" Nov 25 15:53:48 crc kubenswrapper[4879]: I1125 15:53:48.529909 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092"} err="failed to get container status \"a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092\": rpc error: code = NotFound desc = could not find container \"a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092\": container with ID starting with a5e476d59e2ae6258478792a2fbb6aba5314d2f475b865467ac0808977057092 not found: ID does not exist" Nov 25 15:53:49 crc kubenswrapper[4879]: I1125 15:53:49.657611 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" path="/var/lib/kubelet/pods/da091ade-ee09-42fb-90c2-5307cce2401c/volumes" Nov 25 15:53:51 crc kubenswrapper[4879]: I1125 15:53:51.645435 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:53:51 crc kubenswrapper[4879]: E1125 15:53:51.645964 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:54:06 crc kubenswrapper[4879]: I1125 15:54:06.645321 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" 
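The machine-config-daemon-64t7t pair repeating above is kubelet's CrashLoopBackOff at its ceiling: each periodic pod sync logs "RemoveContainer" for the dead container, then the pod worker refuses the restart with "back-off 5m0s". Kubelet delays failed-container restarts with a doubling backoff, 10s at the base and capped at 5m, so once the cap is reached the same I/E pair recurs on every sync (roughly every 10-15s here) until the 5m window expires; the restart finally goes through at 15:54:51 below. A minimal sketch of that capped doubling, assuming kubelet's documented 10s/5m constants (illustrative, not kubelet source):

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		base     = 10 * time.Second // kubelet's initial restart delay
		maxDelay = 5 * time.Minute  // the cap named in "back-off 5m0s" above
	)
	delay := base
	for n := 1; delay < maxDelay; n++ {
		fmt.Printf("restart %d: wait %v\n", n, delay) // 10s, 20s, 40s, 1m20s, 2m40s
		delay *= 2
	}
	// every later attempt inside the window is skipped with the 5m back-off
	fmt.Printf("later restarts: wait %v\n", maxDelay)
}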
Nov 25 15:54:06 crc kubenswrapper[4879]: E1125 15:54:06.647352 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:54:18 crc kubenswrapper[4879]: I1125 15:54:18.645402 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:54:18 crc kubenswrapper[4879]: E1125 15:54:18.646704 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:54:29 crc kubenswrapper[4879]: I1125 15:54:29.650442 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:54:29 crc kubenswrapper[4879]: E1125 15:54:29.651334 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.733211 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734349 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734369 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734398 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="extract-content" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734407 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="extract-content" Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734421 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="extract-utilities" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734428 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="extract-utilities" Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734447 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="extract-utilities" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734455 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" 
containerName="extract-utilities" Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734481 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734489 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: E1125 15:54:39.734507 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="extract-content" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734514 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="extract-content" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734698 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c5aee5c-dc96-4c68-8980-f903915f352e" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.734720 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="da091ade-ee09-42fb-90c2-5307cce2401c" containerName="registry-server" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.735491 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.738021 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-h58mr" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.741842 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.861888 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-523c8715-f4ac-46ea-890d-c284d78a0497\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-523c8715-f4ac-46ea-890d-c284d78a0497\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.861957 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdz9z\" (UniqueName: \"kubernetes.io/projected/7db40489-095f-41fb-80d9-23d910f266bf-kube-api-access-hdz9z\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.963433 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-523c8715-f4ac-46ea-890d-c284d78a0497\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-523c8715-f4ac-46ea-890d-c284d78a0497\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.963518 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdz9z\" (UniqueName: \"kubernetes.io/projected/7db40489-095f-41fb-80d9-23d910f266bf-kube-api-access-hdz9z\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.965982 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.966025 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-523c8715-f4ac-46ea-890d-c284d78a0497\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-523c8715-f4ac-46ea-890d-c284d78a0497\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eee80692cf3e63463ff74dbe570352ba323eaddfdc5977e9891d952568b624c5/globalmount\"" pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.986368 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdz9z\" (UniqueName: \"kubernetes.io/projected/7db40489-095f-41fb-80d9-23d910f266bf-kube-api-access-hdz9z\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:39 crc kubenswrapper[4879]: I1125 15:54:39.992241 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-523c8715-f4ac-46ea-890d-c284d78a0497\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-523c8715-f4ac-46ea-890d-c284d78a0497\") pod \"mariadb-copy-data\" (UID: \"7db40489-095f-41fb-80d9-23d910f266bf\") " pod="openstack/mariadb-copy-data" Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.054018 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.540482 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.644763 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:54:40 crc kubenswrapper[4879]: E1125 15:54:40.645190 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.848436 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7db40489-095f-41fb-80d9-23d910f266bf","Type":"ContainerStarted","Data":"9a3089cb5d610434d9470b3597d683b1181d924bff89d27ce621362fcdca9318"} Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.848482 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7db40489-095f-41fb-80d9-23d910f266bf","Type":"ContainerStarted","Data":"47dff7c8e39ba7864819d99d425c9631a1898fc82e136a8f14f5ce8694ebcbab"} Nov 25 15:54:40 crc kubenswrapper[4879]: I1125 15:54:40.869898 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.869877905 podStartE2EDuration="2.869877905s" podCreationTimestamp="2025-11-25 15:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:54:40.86331337 +0000 UTC m=+5372.466726451" watchObservedRunningTime="2025-11-25 15:54:40.869877905 +0000 UTC m=+5372.473290976" Nov 25 15:54:43 crc 
kubenswrapper[4879]: I1125 15:54:43.582745 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.584778 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.592727 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.716827 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bbgh\" (UniqueName: \"kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh\") pod \"mariadb-client\" (UID: \"adc29556-f4fa-4536-b6d5-753e507d5d2b\") " pod="openstack/mariadb-client" Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.818881 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bbgh\" (UniqueName: \"kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh\") pod \"mariadb-client\" (UID: \"adc29556-f4fa-4536-b6d5-753e507d5d2b\") " pod="openstack/mariadb-client" Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.836921 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bbgh\" (UniqueName: \"kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh\") pod \"mariadb-client\" (UID: \"adc29556-f4fa-4536-b6d5-753e507d5d2b\") " pod="openstack/mariadb-client" Nov 25 15:54:43 crc kubenswrapper[4879]: I1125 15:54:43.907946 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:44 crc kubenswrapper[4879]: I1125 15:54:44.340790 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:44 crc kubenswrapper[4879]: I1125 15:54:44.884351 4879 generic.go:334] "Generic (PLEG): container finished" podID="adc29556-f4fa-4536-b6d5-753e507d5d2b" containerID="ec1f6d8ab59479b1f76ce78af3a26b697d95afda0c449311d018abb8bda53052" exitCode=0 Nov 25 15:54:44 crc kubenswrapper[4879]: I1125 15:54:44.884473 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"adc29556-f4fa-4536-b6d5-753e507d5d2b","Type":"ContainerDied","Data":"ec1f6d8ab59479b1f76ce78af3a26b697d95afda0c449311d018abb8bda53052"} Nov 25 15:54:44 crc kubenswrapper[4879]: I1125 15:54:44.884681 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"adc29556-f4fa-4536-b6d5-753e507d5d2b","Type":"ContainerStarted","Data":"ea72de297aa7435890846153e87cb280336eea4c55c41e7a99068e581f58066d"} Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.192343 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.213331 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_adc29556-f4fa-4536-b6d5-753e507d5d2b/mariadb-client/0.log" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.248898 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.255370 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.261078 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bbgh\" (UniqueName: \"kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh\") pod \"adc29556-f4fa-4536-b6d5-753e507d5d2b\" (UID: \"adc29556-f4fa-4536-b6d5-753e507d5d2b\") " Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.265928 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh" (OuterVolumeSpecName: "kube-api-access-7bbgh") pod "adc29556-f4fa-4536-b6d5-753e507d5d2b" (UID: "adc29556-f4fa-4536-b6d5-753e507d5d2b"). InnerVolumeSpecName "kube-api-access-7bbgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.349352 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:46 crc kubenswrapper[4879]: E1125 15:54:46.349725 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adc29556-f4fa-4536-b6d5-753e507d5d2b" containerName="mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.349744 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="adc29556-f4fa-4536-b6d5-753e507d5d2b" containerName="mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.349942 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="adc29556-f4fa-4536-b6d5-753e507d5d2b" containerName="mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.350516 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.355534 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.363165 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bbgh\" (UniqueName: \"kubernetes.io/projected/adc29556-f4fa-4536-b6d5-753e507d5d2b-kube-api-access-7bbgh\") on node \"crc\" DevicePath \"\"" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.464222 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m47rb\" (UniqueName: \"kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb\") pod \"mariadb-client\" (UID: \"2d1166ad-5048-459d-bf4e-e43a76d27c7e\") " pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.566035 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m47rb\" (UniqueName: \"kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb\") pod \"mariadb-client\" (UID: \"2d1166ad-5048-459d-bf4e-e43a76d27c7e\") " pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.582250 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m47rb\" (UniqueName: \"kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb\") pod \"mariadb-client\" (UID: \"2d1166ad-5048-459d-bf4e-e43a76d27c7e\") " pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.669116 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.902502 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea72de297aa7435890846153e87cb280336eea4c55c41e7a99068e581f58066d" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.902569 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:46 crc kubenswrapper[4879]: I1125 15:54:46.921549 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="adc29556-f4fa-4536-b6d5-753e507d5d2b" podUID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" Nov 25 15:54:47 crc kubenswrapper[4879]: I1125 15:54:47.075612 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:47 crc kubenswrapper[4879]: W1125 15:54:47.080156 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d1166ad_5048_459d_bf4e_e43a76d27c7e.slice/crio-c7d638e34b82f96088b4146e02a1711b2d41d976643cbaa82e5a7612d0ea8460 WatchSource:0}: Error finding container c7d638e34b82f96088b4146e02a1711b2d41d976643cbaa82e5a7612d0ea8460: Status 404 returned error can't find the container with id c7d638e34b82f96088b4146e02a1711b2d41d976643cbaa82e5a7612d0ea8460 Nov 25 15:54:47 crc kubenswrapper[4879]: I1125 15:54:47.657442 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adc29556-f4fa-4536-b6d5-753e507d5d2b" path="/var/lib/kubelet/pods/adc29556-f4fa-4536-b6d5-753e507d5d2b/volumes" Nov 25 15:54:47 crc kubenswrapper[4879]: I1125 15:54:47.910395 4879 generic.go:334] "Generic (PLEG): container finished" podID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" containerID="8c3e8c1a4e5fdb6c6fc314bec71e7d49e227b86bd34d2a715da3c577734db104" exitCode=0 Nov 25 15:54:47 crc kubenswrapper[4879]: I1125 15:54:47.910443 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"2d1166ad-5048-459d-bf4e-e43a76d27c7e","Type":"ContainerDied","Data":"8c3e8c1a4e5fdb6c6fc314bec71e7d49e227b86bd34d2a715da3c577734db104"} Nov 25 15:54:47 crc kubenswrapper[4879]: I1125 15:54:47.910472 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"2d1166ad-5048-459d-bf4e-e43a76d27c7e","Type":"ContainerStarted","Data":"c7d638e34b82f96088b4146e02a1711b2d41d976643cbaa82e5a7612d0ea8460"} Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.169836 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.190944 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_2d1166ad-5048-459d-bf4e-e43a76d27c7e/mariadb-client/0.log" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.211433 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.218158 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.302876 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m47rb\" (UniqueName: \"kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb\") pod \"2d1166ad-5048-459d-bf4e-e43a76d27c7e\" (UID: \"2d1166ad-5048-459d-bf4e-e43a76d27c7e\") " Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.308804 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb" (OuterVolumeSpecName: "kube-api-access-m47rb") pod "2d1166ad-5048-459d-bf4e-e43a76d27c7e" (UID: "2d1166ad-5048-459d-bf4e-e43a76d27c7e"). 
InnerVolumeSpecName "kube-api-access-m47rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.404804 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m47rb\" (UniqueName: \"kubernetes.io/projected/2d1166ad-5048-459d-bf4e-e43a76d27c7e-kube-api-access-m47rb\") on node \"crc\" DevicePath \"\"" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.654070 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" path="/var/lib/kubelet/pods/2d1166ad-5048-459d-bf4e-e43a76d27c7e/volumes" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.924142 4879 scope.go:117] "RemoveContainer" containerID="8c3e8c1a4e5fdb6c6fc314bec71e7d49e227b86bd34d2a715da3c577734db104" Nov 25 15:54:49 crc kubenswrapper[4879]: I1125 15:54:49.924200 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Nov 25 15:54:51 crc kubenswrapper[4879]: I1125 15:54:51.644338 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:54:51 crc kubenswrapper[4879]: I1125 15:54:51.943650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a"} Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.206856 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 15:56:20 crc kubenswrapper[4879]: E1125 15:56:20.207764 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" containerName="mariadb-client" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.207781 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" containerName="mariadb-client" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.207948 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d1166ad-5048-459d-bf4e-e43a76d27c7e" containerName="mariadb-client" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.208831 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.211140 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.211406 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-lxhzq" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.215904 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.217998 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.225334 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.226978 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.234400 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.235715 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.247051 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.277232 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348532 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348592 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-config\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348616 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bad026-7ba9-46a3-91fa-96d023831aa5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348637 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348761 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2f621350-15b8-4743-a730-95537ed7845c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f621350-15b8-4743-a730-95537ed7845c\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348803 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-config\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348833 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c169ae20-c872-409b-b4e2-f8088466f40b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c169ae20-c872-409b-b4e2-f8088466f40b\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348856 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-886ff\" (UniqueName: \"kubernetes.io/projected/46bad026-7ba9-46a3-91fa-96d023831aa5-kube-api-access-886ff\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348911 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/46bad026-7ba9-46a3-91fa-96d023831aa5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348969 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80c08ccb-e227-4456-aa16-e391ea9f7a1b-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.348987 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68trl\" (UniqueName: \"kubernetes.io/projected/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-kube-api-access-68trl\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349022 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mkrz\" (UniqueName: \"kubernetes.io/projected/80c08ccb-e227-4456-aa16-e391ea9f7a1b-kube-api-access-4mkrz\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349040 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349083 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349790 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c08ccb-e227-4456-aa16-e391ea9f7a1b-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349852 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349911 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-config\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.349950 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.395985 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.397411 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.402367 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.403330 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.406330 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-htmkh" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.408649 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.409882 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.425988 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.453552 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455375 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2f621350-15b8-4743-a730-95537ed7845c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f621350-15b8-4743-a730-95537ed7845c\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-config\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455473 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c169ae20-c872-409b-b4e2-f8088466f40b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c169ae20-c872-409b-b4e2-f8088466f40b\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455500 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-886ff\" (UniqueName: \"kubernetes.io/projected/46bad026-7ba9-46a3-91fa-96d023831aa5-kube-api-access-886ff\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " 
pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455539 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/46bad026-7ba9-46a3-91fa-96d023831aa5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455569 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80c08ccb-e227-4456-aa16-e391ea9f7a1b-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455598 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68trl\" (UniqueName: \"kubernetes.io/projected/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-kube-api-access-68trl\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455632 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mkrz\" (UniqueName: \"kubernetes.io/projected/80c08ccb-e227-4456-aa16-e391ea9f7a1b-kube-api-access-4mkrz\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455665 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455687 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455732 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c08ccb-e227-4456-aa16-e391ea9f7a1b-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455770 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455812 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-config\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455841 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455914 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.455975 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-config\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.456001 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bad026-7ba9-46a3-91fa-96d023831aa5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.456062 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.457215 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.461582 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-config\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.461915 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.479899 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.480773 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80c08ccb-e227-4456-aa16-e391ea9f7a1b-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.481341 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc 
kubenswrapper[4879]: I1125 15:56:20.481715 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.481753 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c169ae20-c872-409b-b4e2-f8088466f40b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c169ae20-c872-409b-b4e2-f8088466f40b\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3d636a359317881d1950759e3df5190017f6273c1983092d95796491cadc97d0/globalmount\"" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.481799 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.481844 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2f621350-15b8-4743-a730-95537ed7845c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f621350-15b8-4743-a730-95537ed7845c\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a088f18343710f817ab74e6b12180011998c065b4c23a5f14b3ee8c5847dc4d4/globalmount\"" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.481881 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-config\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.482552 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.482726 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/46bad026-7ba9-46a3-91fa-96d023831aa5-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.483085 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-config\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.483581 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.483623 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/99aa64258104a8e14b4b6ef367e096cd877e790f0ef3b6569afacc54f91874c5/globalmount\"" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.484141 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80c08ccb-e227-4456-aa16-e391ea9f7a1b-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.484765 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46bad026-7ba9-46a3-91fa-96d023831aa5-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.486533 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mkrz\" (UniqueName: \"kubernetes.io/projected/80c08ccb-e227-4456-aa16-e391ea9f7a1b-kube-api-access-4mkrz\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.488345 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68trl\" (UniqueName: \"kubernetes.io/projected/424ac0b4-4812-4bbd-b151-5b00ed6b6b0d-kube-api-access-68trl\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.488521 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bad026-7ba9-46a3-91fa-96d023831aa5-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.490249 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.492231 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c08ccb-e227-4456-aa16-e391ea9f7a1b-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.495218 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-886ff\" (UniqueName: \"kubernetes.io/projected/46bad026-7ba9-46a3-91fa-96d023831aa5-kube-api-access-886ff\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.525070 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c169ae20-c872-409b-b4e2-f8088466f40b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c169ae20-c872-409b-b4e2-f8088466f40b\") pod 
\"ovsdbserver-nb-2\" (UID: \"80c08ccb-e227-4456-aa16-e391ea9f7a1b\") " pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.532426 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2f621350-15b8-4743-a730-95537ed7845c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2f621350-15b8-4743-a730-95537ed7845c\") pod \"ovsdbserver-nb-0\" (UID: \"46bad026-7ba9-46a3-91fa-96d023831aa5\") " pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.540014 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-751de83a-2edb-44be-9fbe-0c9a5affcc63\") pod \"ovsdbserver-nb-1\" (UID: \"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d\") " pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.547398 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.557199 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558411 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-config\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558497 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw7qh\" (UniqueName: \"kubernetes.io/projected/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-kube-api-access-nw7qh\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558532 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d08e32-ebc8-4bea-90f5-13da2037b453-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558569 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558614 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558637 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pf4j\" (UniqueName: \"kubernetes.io/projected/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-kube-api-access-8pf4j\") pod 
\"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558669 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558702 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558729 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558761 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88d08e32-ebc8-4bea-90f5-13da2037b453-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558839 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx6h2\" (UniqueName: \"kubernetes.io/projected/88d08e32-ebc8-4bea-90f5-13da2037b453-kube-api-access-xx6h2\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558863 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-config\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558887 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-config\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558908 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558928 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " 
pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558948 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.558998 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.559016 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.659996 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660057 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88d08e32-ebc8-4bea-90f5-13da2037b453-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660090 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx6h2\" (UniqueName: \"kubernetes.io/projected/88d08e32-ebc8-4bea-90f5-13da2037b453-kube-api-access-xx6h2\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660132 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-config\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660156 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-config\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660172 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660185 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660202 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660246 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660260 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660305 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-config\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660323 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw7qh\" (UniqueName: \"kubernetes.io/projected/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-kube-api-access-nw7qh\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660344 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d08e32-ebc8-4bea-90f5-13da2037b453-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660369 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660394 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660412 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pf4j\" (UniqueName: \"kubernetes.io/projected/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-kube-api-access-8pf4j\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 
15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660431 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.660447 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.662031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.662406 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88d08e32-ebc8-4bea-90f5-13da2037b453-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.662919 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-config\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.663846 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.664819 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88d08e32-ebc8-4bea-90f5-13da2037b453-config\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.665647 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.665762 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.665789 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b6118fb42808b24698d7decd9f6a22f51205d815c5a67967df6ae478805de422/globalmount\"" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.666316 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.666349 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4712bbea5a1a8716c361ac518d2cfef563f32c130f5565cdd0ff41f28984c2b9/globalmount\"" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.666408 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.666427 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f47908be46ed6a10ab135e9227a016116799f965d0f5d831541664101d40afc/globalmount\"" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.667196 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.668196 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-config\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.671351 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88d08e32-ebc8-4bea-90f5-13da2037b453-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.673961 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc 
kubenswrapper[4879]: I1125 15:56:20.674301 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.682444 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.684751 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx6h2\" (UniqueName: \"kubernetes.io/projected/88d08e32-ebc8-4bea-90f5-13da2037b453-kube-api-access-xx6h2\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.686678 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw7qh\" (UniqueName: \"kubernetes.io/projected/dfe5dcf4-c62b-4d71-8459-f873013d4bc0-kube-api-access-nw7qh\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.690060 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pf4j\" (UniqueName: \"kubernetes.io/projected/8d75a7ac-1bb1-4483-8ff3-6087d704e2c6-kube-api-access-8pf4j\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.709817 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d19990b0-68c3-463e-b3d0-68befc4e3374\") pod \"ovsdbserver-sb-0\" (UID: \"88d08e32-ebc8-4bea-90f5-13da2037b453\") " pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.710349 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ad9e42de-4400-407a-b2f0-d1aaa910af2f\") pod \"ovsdbserver-sb-1\" (UID: \"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6\") " pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.713445 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b060577-4b40-4f9f-b4bd-c5d5ba503642\") pod \"ovsdbserver-sb-2\" (UID: \"dfe5dcf4-c62b-4d71-8459-f873013d4bc0\") " pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.722795 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.735413 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.830665 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:20 crc kubenswrapper[4879]: I1125 15:56:20.948440 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.084344 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 25 15:56:21 crc kubenswrapper[4879]: W1125 15:56:21.185647 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80c08ccb_e227_4456_aa16_e391ea9f7a1b.slice/crio-2eba64ca635d954e038e66bc53f4b2e253953d919f22d4dd64b615119ddd580e WatchSource:0}: Error finding container 2eba64ca635d954e038e66bc53f4b2e253953d919f22d4dd64b615119ddd580e: Status 404 returned error can't find the container with id 2eba64ca635d954e038e66bc53f4b2e253953d919f22d4dd64b615119ddd580e Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.185679 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.273995 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 25 15:56:21 crc kubenswrapper[4879]: W1125 15:56:21.291974 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d75a7ac_1bb1_4483_8ff3_6087d704e2c6.slice/crio-494ec7dac70d9a31ca2a4644e91d59580b2d9bb6b6a906b1439a7f9357683b7b WatchSource:0}: Error finding container 494ec7dac70d9a31ca2a4644e91d59580b2d9bb6b6a906b1439a7f9357683b7b: Status 404 returned error can't find the container with id 494ec7dac70d9a31ca2a4644e91d59580b2d9bb6b6a906b1439a7f9357683b7b Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.411847 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.502829 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.665379 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=2.665359392 podStartE2EDuration="2.665359392s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:21.66461484 +0000 UTC m=+5473.268027911" watchObservedRunningTime="2025-11-25 15:56:21.665359392 +0000 UTC m=+5473.268772463" Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.672802 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"80c08ccb-e227-4456-aa16-e391ea9f7a1b","Type":"ContainerStarted","Data":"88dab1d444d2baf1440eb1516835ba1d5ffb0b792723393baf744cfd7b89542d"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.672959 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"80c08ccb-e227-4456-aa16-e391ea9f7a1b","Type":"ContainerStarted","Data":"098d876b239d057e7fa94f317b58d425f032bdbe707ab6609bc810077be6c77c"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673043 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"80c08ccb-e227-4456-aa16-e391ea9f7a1b","Type":"ContainerStarted","Data":"2eba64ca635d954e038e66bc53f4b2e253953d919f22d4dd64b615119ddd580e"} Nov 25 15:56:21 
crc kubenswrapper[4879]: I1125 15:56:21.673195 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"dfe5dcf4-c62b-4d71-8459-f873013d4bc0","Type":"ContainerStarted","Data":"b4e2bf93f85dffaed3121b3becfb80a658bf13a1c679a4c8811dadf0fc9113ac"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"46bad026-7ba9-46a3-91fa-96d023831aa5","Type":"ContainerStarted","Data":"9399e5ffd34a718f236269ce3b99078ee5bf743f9df5abdd756a61b8f653e1ec"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673369 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d","Type":"ContainerStarted","Data":"52e08d95afcbc3d266d167c76ea473d4c92827a134153461b414ec201f8986e8"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673460 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d","Type":"ContainerStarted","Data":"5992349e2923c277a146b4651a34b75fa8f5f601a2101b5ba4777084b5e15fd9"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673548 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"424ac0b4-4812-4bbd-b151-5b00ed6b6b0d","Type":"ContainerStarted","Data":"6f4883e7a26c78112ff1262eafc94a853f139a0f7eedcaad6646713c4e8e0e78"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6","Type":"ContainerStarted","Data":"28a939c8a2130127d7b4459591336fdb900bb6a2ea49478fe698564685f83ff3"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.673746 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6","Type":"ContainerStarted","Data":"494ec7dac70d9a31ca2a4644e91d59580b2d9bb6b6a906b1439a7f9357683b7b"} Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.693951 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 25 15:56:21 crc kubenswrapper[4879]: I1125 15:56:21.697996 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=2.697976965 podStartE2EDuration="2.697976965s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:21.689885235 +0000 UTC m=+5473.293298306" watchObservedRunningTime="2025-11-25 15:56:21.697976965 +0000 UTC m=+5473.301390036" Nov 25 15:56:21 crc kubenswrapper[4879]: W1125 15:56:21.702156 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88d08e32_ebc8_4bea_90f5_13da2037b453.slice/crio-b99d2654612c4c8d6a08fd45b98cb250744de380d76b86987785173c6c10ac47 WatchSource:0}: Error finding container b99d2654612c4c8d6a08fd45b98cb250744de380d76b86987785173c6c10ac47: Status 404 returned error can't find the container with id b99d2654612c4c8d6a08fd45b98cb250744de380d76b86987785173c6c10ac47 Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.667876 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" 
event={"ID":"dfe5dcf4-c62b-4d71-8459-f873013d4bc0","Type":"ContainerStarted","Data":"9f34c9edbdb29e7429582bb9f437a9d54de4a6aa04cc0db3aaa3e87703ba78fb"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.668265 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"dfe5dcf4-c62b-4d71-8459-f873013d4bc0","Type":"ContainerStarted","Data":"076ca8bd391eb91cd353bb21f2d506fb456ce0c4916b75020b259156415c0768"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.670563 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"46bad026-7ba9-46a3-91fa-96d023831aa5","Type":"ContainerStarted","Data":"2ccd79978c593e65487d6a4db7d1eff1cd0086343615343028084efe6e9b0110"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.670600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"46bad026-7ba9-46a3-91fa-96d023831aa5","Type":"ContainerStarted","Data":"15b380af071a30fd4204b002bd420eb9e695e330b0758208fcbdc0c990538672"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.672371 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"8d75a7ac-1bb1-4483-8ff3-6087d704e2c6","Type":"ContainerStarted","Data":"6e1ccbc83684eeb323e1257419b4525d0902b0ea8efd58212325a7d42b21b702"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.674522 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"88d08e32-ebc8-4bea-90f5-13da2037b453","Type":"ContainerStarted","Data":"77a36e895922cb6536ae40e8f6d8c2a7f47902dff53a639e2f95e8de58bad8b1"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.674643 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"88d08e32-ebc8-4bea-90f5-13da2037b453","Type":"ContainerStarted","Data":"3bae70f3c7892b2eef6bf5467b72510cee9d5dc5689eb1bf2033e61ae5576e8e"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.674655 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"88d08e32-ebc8-4bea-90f5-13da2037b453","Type":"ContainerStarted","Data":"b99d2654612c4c8d6a08fd45b98cb250744de380d76b86987785173c6c10ac47"} Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.690361 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.690342645 podStartE2EDuration="3.690342645s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:22.683268125 +0000 UTC m=+5474.286681216" watchObservedRunningTime="2025-11-25 15:56:22.690342645 +0000 UTC m=+5474.293755716" Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.704718 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.704700281 podStartE2EDuration="3.704700281s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:22.699664238 +0000 UTC m=+5474.303077329" watchObservedRunningTime="2025-11-25 15:56:22.704700281 +0000 UTC m=+5474.308113352" Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.716270 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.716252448 podStartE2EDuration="3.716252448s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:22.712590325 +0000 UTC m=+5474.316003396" watchObservedRunningTime="2025-11-25 15:56:22.716252448 +0000 UTC m=+5474.319665519" Nov 25 15:56:22 crc kubenswrapper[4879]: I1125 15:56:22.733787 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.733766115 podStartE2EDuration="3.733766115s" podCreationTimestamp="2025-11-25 15:56:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:22.729903685 +0000 UTC m=+5474.333316756" watchObservedRunningTime="2025-11-25 15:56:22.733766115 +0000 UTC m=+5474.337179186" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.548204 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.558064 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.723761 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.735701 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.831518 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:23 crc kubenswrapper[4879]: I1125 15:56:23.950191 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.548243 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.558458 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.723845 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.736421 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.831728 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:25 crc kubenswrapper[4879]: I1125 15:56:25.949805 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.580585 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.590240 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.624656 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Nov 25 15:56:26 crc kubenswrapper[4879]: 
I1125 15:56:26.630109 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.767347 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.814509 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.822140 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.824379 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.826135 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.836671 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.837879 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.863647 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.885809 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.935337 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.962926 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7m4r\" (UniqueName: \"kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.962995 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.963022 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.963219 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:26 crc kubenswrapper[4879]: I1125 15:56:26.995413 4879 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.051688 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.064467 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.064571 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7m4r\" (UniqueName: \"kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.064638 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.064652 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.065895 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.066333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.066877 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.100982 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7m4r\" (UniqueName: \"kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r\") pod \"dnsmasq-dns-5db5ff4945-c6d65\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.145480 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.146056 4879 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.182186 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.186599 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.190343 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.212538 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.270347 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.270432 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.270499 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.270631 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2vmb\" (UniqueName: \"kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.270669 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.372100 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.372973 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 
15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.373055 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.373368 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2vmb\" (UniqueName: \"kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.373423 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.373499 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.374270 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.374506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.378820 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.395342 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2vmb\" (UniqueName: \"kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb\") pod \"dnsmasq-dns-56467fbf9c-dtf95\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.568962 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.622227 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.714788 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" event={"ID":"0072824a-e6d1-4fee-baac-64759c9171cc","Type":"ContainerStarted","Data":"16611b6f35712318d4cbfae1816d94e7740fb7c0b41f308f727d323140356db9"} Nov 25 15:56:27 crc kubenswrapper[4879]: I1125 15:56:27.973406 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:56:27 crc kubenswrapper[4879]: W1125 15:56:27.974751 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod717f3252_a021_43b1_ba4e_de174e6db38a.slice/crio-d3d3ee948eace5086c67419a71eb55b2dbfe4fcaad81fa192ae9c116bbaf9b82 WatchSource:0}: Error finding container d3d3ee948eace5086c67419a71eb55b2dbfe4fcaad81fa192ae9c116bbaf9b82: Status 404 returned error can't find the container with id d3d3ee948eace5086c67419a71eb55b2dbfe4fcaad81fa192ae9c116bbaf9b82 Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.724583 4879 generic.go:334] "Generic (PLEG): container finished" podID="0072824a-e6d1-4fee-baac-64759c9171cc" containerID="675dc70e91e8abfffb137aa864f5aa725ade7f9424e99c19384540516599d377" exitCode=0 Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.724650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" event={"ID":"0072824a-e6d1-4fee-baac-64759c9171cc","Type":"ContainerDied","Data":"675dc70e91e8abfffb137aa864f5aa725ade7f9424e99c19384540516599d377"} Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.728468 4879 generic.go:334] "Generic (PLEG): container finished" podID="717f3252-a021-43b1-ba4e-de174e6db38a" containerID="fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353" exitCode=0 Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.728540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" event={"ID":"717f3252-a021-43b1-ba4e-de174e6db38a","Type":"ContainerDied","Data":"fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353"} Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.728806 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" event={"ID":"717f3252-a021-43b1-ba4e-de174e6db38a","Type":"ContainerStarted","Data":"d3d3ee948eace5086c67419a71eb55b2dbfe4fcaad81fa192ae9c116bbaf9b82"} Nov 25 15:56:28 crc kubenswrapper[4879]: I1125 15:56:28.992842 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.101304 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7m4r\" (UniqueName: \"kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r\") pod \"0072824a-e6d1-4fee-baac-64759c9171cc\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.101364 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb\") pod \"0072824a-e6d1-4fee-baac-64759c9171cc\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.101423 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc\") pod \"0072824a-e6d1-4fee-baac-64759c9171cc\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.101460 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config\") pod \"0072824a-e6d1-4fee-baac-64759c9171cc\" (UID: \"0072824a-e6d1-4fee-baac-64759c9171cc\") " Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.113338 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r" (OuterVolumeSpecName: "kube-api-access-z7m4r") pod "0072824a-e6d1-4fee-baac-64759c9171cc" (UID: "0072824a-e6d1-4fee-baac-64759c9171cc"). InnerVolumeSpecName "kube-api-access-z7m4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.123251 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0072824a-e6d1-4fee-baac-64759c9171cc" (UID: "0072824a-e6d1-4fee-baac-64759c9171cc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.126322 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config" (OuterVolumeSpecName: "config") pod "0072824a-e6d1-4fee-baac-64759c9171cc" (UID: "0072824a-e6d1-4fee-baac-64759c9171cc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.128375 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0072824a-e6d1-4fee-baac-64759c9171cc" (UID: "0072824a-e6d1-4fee-baac-64759c9171cc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.203016 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7m4r\" (UniqueName: \"kubernetes.io/projected/0072824a-e6d1-4fee-baac-64759c9171cc-kube-api-access-z7m4r\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.203060 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.203073 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.203082 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0072824a-e6d1-4fee-baac-64759c9171cc-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.737265 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" event={"ID":"717f3252-a021-43b1-ba4e-de174e6db38a","Type":"ContainerStarted","Data":"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835"} Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.739316 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" event={"ID":"0072824a-e6d1-4fee-baac-64759c9171cc","Type":"ContainerDied","Data":"16611b6f35712318d4cbfae1816d94e7740fb7c0b41f308f727d323140356db9"} Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.739377 4879 scope.go:117] "RemoveContainer" containerID="675dc70e91e8abfffb137aa864f5aa725ade7f9424e99c19384540516599d377" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.739554 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5db5ff4945-c6d65" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.762836 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" podStartSLOduration=2.76280877 podStartE2EDuration="2.76280877s" podCreationTimestamp="2025-11-25 15:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:29.75998386 +0000 UTC m=+5481.363396931" watchObservedRunningTime="2025-11-25 15:56:29.76280877 +0000 UTC m=+5481.366221841" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.802593 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.804146 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5db5ff4945-c6d65"] Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.961985 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Nov 25 15:56:29 crc kubenswrapper[4879]: E1125 15:56:29.962541 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0072824a-e6d1-4fee-baac-64759c9171cc" containerName="init" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.962564 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0072824a-e6d1-4fee-baac-64759c9171cc" containerName="init" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.962757 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0072824a-e6d1-4fee-baac-64759c9171cc" containerName="init" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.963799 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.969365 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Nov 25 15:56:29 crc kubenswrapper[4879]: I1125 15:56:29.972493 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.016829 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.016905 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qgdz\" (UniqueName: \"kubernetes.io/projected/6d94a7fd-3168-40a2-94ec-3b34536b1637-kube-api-access-9qgdz\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.016926 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/6d94a7fd-3168-40a2-94ec-3b34536b1637-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.118571 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.118675 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qgdz\" (UniqueName: \"kubernetes.io/projected/6d94a7fd-3168-40a2-94ec-3b34536b1637-kube-api-access-9qgdz\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.118703 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/6d94a7fd-3168-40a2-94ec-3b34536b1637-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.121550 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.121586 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d1d427314c26360136c24b159ba9ab5c3bcb3b586ca76a5488b3a16cd0d782b3/globalmount\"" pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.124628 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/6d94a7fd-3168-40a2-94ec-3b34536b1637-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.137922 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qgdz\" (UniqueName: \"kubernetes.io/projected/6d94a7fd-3168-40a2-94ec-3b34536b1637-kube-api-access-9qgdz\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.168559 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-04f82d14-a378-4ae7-a0ed-672445d06cea\") pod \"ovn-copy-data\" (UID: \"6d94a7fd-3168-40a2-94ec-3b34536b1637\") " pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.280497 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.751561 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 25 15:56:30 crc kubenswrapper[4879]: I1125 15:56:30.752209 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:31 crc kubenswrapper[4879]: I1125 15:56:31.655099 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0072824a-e6d1-4fee-baac-64759c9171cc" path="/var/lib/kubelet/pods/0072824a-e6d1-4fee-baac-64759c9171cc/volumes" Nov 25 15:56:31 crc kubenswrapper[4879]: I1125 15:56:31.760700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"6d94a7fd-3168-40a2-94ec-3b34536b1637","Type":"ContainerStarted","Data":"df2495acce1db0ea40f2b80a9b81db98c3c0fa50b453826a6f3cec993fa06cbb"} Nov 25 15:56:31 crc kubenswrapper[4879]: I1125 15:56:31.760742 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"6d94a7fd-3168-40a2-94ec-3b34536b1637","Type":"ContainerStarted","Data":"bc1d3fcf9d9e347a502d6448850f40bddeff10434662abb7e75ec174382165c4"} Nov 25 15:56:31 crc kubenswrapper[4879]: I1125 15:56:31.780364 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.78034516 podStartE2EDuration="3.78034516s" podCreationTimestamp="2025-11-25 15:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:31.778541279 +0000 UTC m=+5483.381954340" watchObservedRunningTime="2025-11-25 15:56:31.78034516 +0000 UTC m=+5483.383758231" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.096007 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.097790 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.099847 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.099889 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-thvh7" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.100544 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.115732 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.129640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/866649f8-e50c-40d5-8b63-f419f521f9a6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.129699 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2bjr\" (UniqueName: \"kubernetes.io/projected/866649f8-e50c-40d5-8b63-f419f521f9a6-kube-api-access-n2bjr\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.129824 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-config\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.129863 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866649f8-e50c-40d5-8b63-f419f521f9a6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.129888 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-scripts\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231100 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/866649f8-e50c-40d5-8b63-f419f521f9a6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231221 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2bjr\" (UniqueName: \"kubernetes.io/projected/866649f8-e50c-40d5-8b63-f419f521f9a6-kube-api-access-n2bjr\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231294 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-config\") pod \"ovn-northd-0\" 
(UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231327 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866649f8-e50c-40d5-8b63-f419f521f9a6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231354 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-scripts\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.231663 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/866649f8-e50c-40d5-8b63-f419f521f9a6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.232278 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-scripts\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.232615 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/866649f8-e50c-40d5-8b63-f419f521f9a6-config\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.240570 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866649f8-e50c-40d5-8b63-f419f521f9a6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.247894 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2bjr\" (UniqueName: \"kubernetes.io/projected/866649f8-e50c-40d5-8b63-f419f521f9a6-kube-api-access-n2bjr\") pod \"ovn-northd-0\" (UID: \"866649f8-e50c-40d5-8b63-f419f521f9a6\") " pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.430084 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.570375 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.640550 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.640797 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="dnsmasq-dns" containerID="cri-o://5e5c77be29cb635bee1d4c95e6e2cd7666e1bb814a8070db28fc12329a7b3b5d" gracePeriod=10 Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.822912 4879 generic.go:334] "Generic (PLEG): container finished" podID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerID="5e5c77be29cb635bee1d4c95e6e2cd7666e1bb814a8070db28fc12329a7b3b5d" exitCode=0 Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.823202 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerDied","Data":"5e5c77be29cb635bee1d4c95e6e2cd7666e1bb814a8070db28fc12329a7b3b5d"} Nov 25 15:56:37 crc kubenswrapper[4879]: I1125 15:56:37.867506 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.038478 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.148188 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pf2zk\" (UniqueName: \"kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk\") pod \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.148365 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config\") pod \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.148429 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc\") pod \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\" (UID: \"c85229dc-568d-4f6c-8ffe-004dd62eb92e\") " Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.153262 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk" (OuterVolumeSpecName: "kube-api-access-pf2zk") pod "c85229dc-568d-4f6c-8ffe-004dd62eb92e" (UID: "c85229dc-568d-4f6c-8ffe-004dd62eb92e"). InnerVolumeSpecName "kube-api-access-pf2zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.195985 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config" (OuterVolumeSpecName: "config") pod "c85229dc-568d-4f6c-8ffe-004dd62eb92e" (UID: "c85229dc-568d-4f6c-8ffe-004dd62eb92e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.196469 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c85229dc-568d-4f6c-8ffe-004dd62eb92e" (UID: "c85229dc-568d-4f6c-8ffe-004dd62eb92e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.250267 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.250301 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c85229dc-568d-4f6c-8ffe-004dd62eb92e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.250311 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pf2zk\" (UniqueName: \"kubernetes.io/projected/c85229dc-568d-4f6c-8ffe-004dd62eb92e-kube-api-access-pf2zk\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.833183 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"866649f8-e50c-40d5-8b63-f419f521f9a6","Type":"ContainerStarted","Data":"a817fdb2eef2e13d93da26bec084f9e785a99085e5701b7cea4f6b02ee53abee"} Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.833265 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.833282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"866649f8-e50c-40d5-8b63-f419f521f9a6","Type":"ContainerStarted","Data":"287e9bace01c641fc013450f8d99333449ab3e3cc87305ff15156a12d2c17f6a"} Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.833295 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"866649f8-e50c-40d5-8b63-f419f521f9a6","Type":"ContainerStarted","Data":"47eff83082ce559aae462ad11247a5c2a6acaeb874e50c7e4ffb7e148a074c63"} Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.836377 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" event={"ID":"c85229dc-568d-4f6c-8ffe-004dd62eb92e","Type":"ContainerDied","Data":"d42b481aed30e57e6559c79fcfb5a9c54220ab1e29bf0b13a9d7009f4eccfbbd"} Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.836433 4879 scope.go:117] "RemoveContainer" containerID="5e5c77be29cb635bee1d4c95e6e2cd7666e1bb814a8070db28fc12329a7b3b5d" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.836555 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-sdnzq" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.857528 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.857496668 podStartE2EDuration="1.857496668s" podCreationTimestamp="2025-11-25 15:56:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:38.852015634 +0000 UTC m=+5490.455428715" watchObservedRunningTime="2025-11-25 15:56:38.857496668 +0000 UTC m=+5490.460909739" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.860754 4879 scope.go:117] "RemoveContainer" containerID="9733f42362eaa34aed048974736702273b7e7cf922b7a0009dcf0af12211b1a7" Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.875388 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:56:38 crc kubenswrapper[4879]: I1125 15:56:38.882369 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-sdnzq"] Nov 25 15:56:39 crc kubenswrapper[4879]: I1125 15:56:39.659341 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" path="/var/lib/kubelet/pods/c85229dc-568d-4f6c-8ffe-004dd62eb92e/volumes" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.516072 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fdl5v"] Nov 25 15:56:42 crc kubenswrapper[4879]: E1125 15:56:42.516846 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="dnsmasq-dns" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.516864 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="dnsmasq-dns" Nov 25 15:56:42 crc kubenswrapper[4879]: E1125 15:56:42.516905 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="init" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.516915 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="init" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.517112 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c85229dc-568d-4f6c-8ffe-004dd62eb92e" containerName="dnsmasq-dns" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.517835 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.523513 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fdl5v"] Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.611652 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7345-account-create-csxxd"] Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.612791 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.614584 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.619243 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6c7b\" (UniqueName: \"kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b\") pod \"keystone-db-create-fdl5v\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.619436 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts\") pod \"keystone-db-create-fdl5v\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.620605 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7345-account-create-csxxd"] Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.720828 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kf4d\" (UniqueName: \"kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.720890 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6c7b\" (UniqueName: \"kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b\") pod \"keystone-db-create-fdl5v\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.721068 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts\") pod \"keystone-db-create-fdl5v\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.721210 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.721746 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts\") pod \"keystone-db-create-fdl5v\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.741223 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6c7b\" (UniqueName: \"kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b\") pod \"keystone-db-create-fdl5v\" (UID: 
\"04b41c50-a8bf-4159-9632-7c84953367b9\") " pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.822568 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kf4d\" (UniqueName: \"kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.822989 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.823713 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.839213 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kf4d\" (UniqueName: \"kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d\") pod \"keystone-7345-account-create-csxxd\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.840713 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:42 crc kubenswrapper[4879]: I1125 15:56:42.929002 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.137061 4879 scope.go:117] "RemoveContainer" containerID="d85f5857b32a24f5b93ca075b088dca8b2da5365e77c2786c37d773d3a6bd094" Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.165047 4879 scope.go:117] "RemoveContainer" containerID="d6ac269f28956b59b9dba33643de9e19a6c2ace67ea67aa9256d847d2cadc5ee" Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.183950 4879 scope.go:117] "RemoveContainer" containerID="5f4ab9268de1f5c2a6631e232fd55bd422a193069f41d1f8dce35c3ae1dc8403" Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.203263 4879 scope.go:117] "RemoveContainer" containerID="bbb378f22aa963cee394575007fae43400be2512ab3b1790e49d7a27eaaade17" Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.264835 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fdl5v"] Nov 25 15:56:43 crc kubenswrapper[4879]: W1125 15:56:43.276150 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04b41c50_a8bf_4159_9632_7c84953367b9.slice/crio-b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854 WatchSource:0}: Error finding container b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854: Status 404 returned error can't find the container with id b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854 Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.366061 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7345-account-create-csxxd"] Nov 25 15:56:43 crc kubenswrapper[4879]: W1125 15:56:43.373849 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac980b0e_5d92_48b7_a8cb_9bd4b490b9f4.slice/crio-f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96 WatchSource:0}: Error finding container f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96: Status 404 returned error can't find the container with id f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96 Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.885900 4879 generic.go:334] "Generic (PLEG): container finished" podID="04b41c50-a8bf-4159-9632-7c84953367b9" containerID="d63274a205b0f5e75c14303587038911c14d25b558145749dad291ea725caabe" exitCode=0 Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.885977 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fdl5v" event={"ID":"04b41c50-a8bf-4159-9632-7c84953367b9","Type":"ContainerDied","Data":"d63274a205b0f5e75c14303587038911c14d25b558145749dad291ea725caabe"} Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.886447 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fdl5v" event={"ID":"04b41c50-a8bf-4159-9632-7c84953367b9","Type":"ContainerStarted","Data":"b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854"} Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.889106 4879 generic.go:334] "Generic (PLEG): container finished" podID="ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" containerID="d6f58e8d4960308886fd6ea3590564482654153daabb97d2d516550fcacc929f" exitCode=0 Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.889231 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7345-account-create-csxxd" 
event={"ID":"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4","Type":"ContainerDied","Data":"d6f58e8d4960308886fd6ea3590564482654153daabb97d2d516550fcacc929f"} Nov 25 15:56:43 crc kubenswrapper[4879]: I1125 15:56:43.889264 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7345-account-create-csxxd" event={"ID":"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4","Type":"ContainerStarted","Data":"f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96"} Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.294961 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.302050 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.468990 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts\") pod \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.469362 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6c7b\" (UniqueName: \"kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b\") pod \"04b41c50-a8bf-4159-9632-7c84953367b9\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.469523 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts\") pod \"04b41c50-a8bf-4159-9632-7c84953367b9\" (UID: \"04b41c50-a8bf-4159-9632-7c84953367b9\") " Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.469609 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kf4d\" (UniqueName: \"kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d\") pod \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\" (UID: \"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4\") " Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.470518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" (UID: "ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.471020 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04b41c50-a8bf-4159-9632-7c84953367b9" (UID: "04b41c50-a8bf-4159-9632-7c84953367b9"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.475884 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b" (OuterVolumeSpecName: "kube-api-access-q6c7b") pod "04b41c50-a8bf-4159-9632-7c84953367b9" (UID: "04b41c50-a8bf-4159-9632-7c84953367b9"). InnerVolumeSpecName "kube-api-access-q6c7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.476316 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d" (OuterVolumeSpecName: "kube-api-access-5kf4d") pod "ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" (UID: "ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4"). InnerVolumeSpecName "kube-api-access-5kf4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.571267 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.571304 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6c7b\" (UniqueName: \"kubernetes.io/projected/04b41c50-a8bf-4159-9632-7c84953367b9-kube-api-access-q6c7b\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.571314 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04b41c50-a8bf-4159-9632-7c84953367b9-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.571324 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kf4d\" (UniqueName: \"kubernetes.io/projected/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4-kube-api-access-5kf4d\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.905819 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fdl5v" event={"ID":"04b41c50-a8bf-4159-9632-7c84953367b9","Type":"ContainerDied","Data":"b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854"} Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.905860 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b64441d8fd7761d09ecf3921cad15ccc88dc8cee93e1532fb546ce07b60b2854" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.905857 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fdl5v" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.907859 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7345-account-create-csxxd" event={"ID":"ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4","Type":"ContainerDied","Data":"f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96"} Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.907892 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f08222dc6a6bb88c78cf022742ab3d085acf662bbab04dd0921cfa1e80bd4b96" Nov 25 15:56:45 crc kubenswrapper[4879]: I1125 15:56:45.907937 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7345-account-create-csxxd" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.052116 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4pllz"] Nov 25 15:56:48 crc kubenswrapper[4879]: E1125 15:56:48.052700 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b41c50-a8bf-4159-9632-7c84953367b9" containerName="mariadb-database-create" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.052713 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b41c50-a8bf-4159-9632-7c84953367b9" containerName="mariadb-database-create" Nov 25 15:56:48 crc kubenswrapper[4879]: E1125 15:56:48.052754 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" containerName="mariadb-account-create" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.052760 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" containerName="mariadb-account-create" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.052924 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" containerName="mariadb-account-create" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.052948 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="04b41c50-a8bf-4159-9632-7c84953367b9" containerName="mariadb-database-create" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.053516 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.058404 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fd7cl" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.060361 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.060758 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.060924 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.066750 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4pllz"] Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.214414 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w2km\" (UniqueName: \"kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.214487 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.214573 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data\") pod 
\"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.316384 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w2km\" (UniqueName: \"kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.316482 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.316601 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.324160 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.324560 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.332883 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w2km\" (UniqueName: \"kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km\") pod \"keystone-db-sync-4pllz\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.371971 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.782514 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4pllz"] Nov 25 15:56:48 crc kubenswrapper[4879]: W1125 15:56:48.786069 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2aadf716_b59d_4ef9_9427_0ceb46ff8816.slice/crio-0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd WatchSource:0}: Error finding container 0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd: Status 404 returned error can't find the container with id 0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd Nov 25 15:56:48 crc kubenswrapper[4879]: I1125 15:56:48.932058 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4pllz" event={"ID":"2aadf716-b59d-4ef9-9427-0ceb46ff8816","Type":"ContainerStarted","Data":"0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd"} Nov 25 15:56:49 crc kubenswrapper[4879]: I1125 15:56:49.944711 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4pllz" event={"ID":"2aadf716-b59d-4ef9-9427-0ceb46ff8816","Type":"ContainerStarted","Data":"fecb88eb3e1d141b24f52f41031140e29f6776b08b9708e5fc1edb7dfc65c584"} Nov 25 15:56:49 crc kubenswrapper[4879]: I1125 15:56:49.979979 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4pllz" podStartSLOduration=1.9799592179999999 podStartE2EDuration="1.979959218s" podCreationTimestamp="2025-11-25 15:56:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:49.974286727 +0000 UTC m=+5501.577699808" watchObservedRunningTime="2025-11-25 15:56:49.979959218 +0000 UTC m=+5501.583372289" Nov 25 15:56:50 crc kubenswrapper[4879]: I1125 15:56:50.955326 4879 generic.go:334] "Generic (PLEG): container finished" podID="2aadf716-b59d-4ef9-9427-0ceb46ff8816" containerID="fecb88eb3e1d141b24f52f41031140e29f6776b08b9708e5fc1edb7dfc65c584" exitCode=0 Nov 25 15:56:50 crc kubenswrapper[4879]: I1125 15:56:50.955366 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4pllz" event={"ID":"2aadf716-b59d-4ef9-9427-0ceb46ff8816","Type":"ContainerDied","Data":"fecb88eb3e1d141b24f52f41031140e29f6776b08b9708e5fc1edb7dfc65c584"} Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.330765 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.381667 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle\") pod \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.381802 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data\") pod \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.381834 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w2km\" (UniqueName: \"kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km\") pod \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\" (UID: \"2aadf716-b59d-4ef9-9427-0ceb46ff8816\") " Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.387906 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km" (OuterVolumeSpecName: "kube-api-access-4w2km") pod "2aadf716-b59d-4ef9-9427-0ceb46ff8816" (UID: "2aadf716-b59d-4ef9-9427-0ceb46ff8816"). InnerVolumeSpecName "kube-api-access-4w2km". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.406670 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2aadf716-b59d-4ef9-9427-0ceb46ff8816" (UID: "2aadf716-b59d-4ef9-9427-0ceb46ff8816"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.427271 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data" (OuterVolumeSpecName: "config-data") pod "2aadf716-b59d-4ef9-9427-0ceb46ff8816" (UID: "2aadf716-b59d-4ef9-9427-0ceb46ff8816"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.483034 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w2km\" (UniqueName: \"kubernetes.io/projected/2aadf716-b59d-4ef9-9427-0ceb46ff8816-kube-api-access-4w2km\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.483070 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.483084 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2aadf716-b59d-4ef9-9427-0ceb46ff8816-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.490086 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.920107 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:56:52 crc kubenswrapper[4879]: E1125 15:56:52.920472 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aadf716-b59d-4ef9-9427-0ceb46ff8816" containerName="keystone-db-sync" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.920491 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aadf716-b59d-4ef9-9427-0ceb46ff8816" containerName="keystone-db-sync" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.920667 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aadf716-b59d-4ef9-9427-0ceb46ff8816" containerName="keystone-db-sync" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.930807 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.977567 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992271 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lldft\" (UniqueName: \"kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992448 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992516 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992599 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992748 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.992809 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hht89"] Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.995541 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.997915 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4pllz" event={"ID":"2aadf716-b59d-4ef9-9427-0ceb46ff8816","Type":"ContainerDied","Data":"0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd"} Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.997951 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0170508ce27e2239a788defce3f973bdbe5c9117a9afa6c28630f82789f103bd" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.998004 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4pllz" Nov 25 15:56:52 crc kubenswrapper[4879]: I1125 15:56:52.999345 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.018561 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hht89"] Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094390 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094545 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lldft\" (UniqueName: \"kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094764 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094792 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094821 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094896 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094935 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hdhb\" (UniqueName: \"kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.094976 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 
crc kubenswrapper[4879]: I1125 15:56:53.094997 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.095374 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.095419 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.096279 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.096306 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.096344 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.096363 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.117244 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lldft\" (UniqueName: \"kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft\") pod \"dnsmasq-dns-84bcbbddf7-jwnlk\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.196823 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.196882 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.196901 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.196938 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hdhb\" (UniqueName: \"kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.197013 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.197037 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.200710 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.200900 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.201509 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.201931 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.202160 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " 
pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.215418 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hdhb\" (UniqueName: \"kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb\") pod \"keystone-bootstrap-hht89\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.271468 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.318768 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.713864 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:56:53 crc kubenswrapper[4879]: W1125 15:56:53.716176 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a8e4590_eb09_4f47_9b2e_36946792bdd5.slice/crio-f84f9032f322eeebc38a7251f874351d5d41d97e0fc954a885a3fa50ac72fa08 WatchSource:0}: Error finding container f84f9032f322eeebc38a7251f874351d5d41d97e0fc954a885a3fa50ac72fa08: Status 404 returned error can't find the container with id f84f9032f322eeebc38a7251f874351d5d41d97e0fc954a885a3fa50ac72fa08 Nov 25 15:56:53 crc kubenswrapper[4879]: I1125 15:56:53.804491 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hht89"] Nov 25 15:56:53 crc kubenswrapper[4879]: W1125 15:56:53.809646 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf34c8d56_9756_469e_b656_5ca38c759d64.slice/crio-dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206 WatchSource:0}: Error finding container dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206: Status 404 returned error can't find the container with id dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206 Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.008857 4879 generic.go:334] "Generic (PLEG): container finished" podID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerID="5e1e663aa1c15afeee6b7a8ded3ace8f07ac11ca358ba44d2650668d51537e5f" exitCode=0 Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.009216 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" event={"ID":"9a8e4590-eb09-4f47-9b2e-36946792bdd5","Type":"ContainerDied","Data":"5e1e663aa1c15afeee6b7a8ded3ace8f07ac11ca358ba44d2650668d51537e5f"} Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.009269 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" event={"ID":"9a8e4590-eb09-4f47-9b2e-36946792bdd5","Type":"ContainerStarted","Data":"f84f9032f322eeebc38a7251f874351d5d41d97e0fc954a885a3fa50ac72fa08"} Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.011942 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hht89" event={"ID":"f34c8d56-9756-469e-b656-5ca38c759d64","Type":"ContainerStarted","Data":"706f81d34201040112f6f101080b7d5ff912b4fe675930e89f303d9fa1e58895"} Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.011991 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hht89" 
event={"ID":"f34c8d56-9756-469e-b656-5ca38c759d64","Type":"ContainerStarted","Data":"dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206"} Nov 25 15:56:54 crc kubenswrapper[4879]: I1125 15:56:54.053264 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hht89" podStartSLOduration=2.053242669 podStartE2EDuration="2.053242669s" podCreationTimestamp="2025-11-25 15:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:54.04586689 +0000 UTC m=+5505.649279961" watchObservedRunningTime="2025-11-25 15:56:54.053242669 +0000 UTC m=+5505.656655740" Nov 25 15:56:55 crc kubenswrapper[4879]: I1125 15:56:55.021870 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" event={"ID":"9a8e4590-eb09-4f47-9b2e-36946792bdd5","Type":"ContainerStarted","Data":"64e2774bd6094460f1787c9823f885ab357af220d911aec0a6fde040062008cd"} Nov 25 15:56:55 crc kubenswrapper[4879]: I1125 15:56:55.023193 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:56:55 crc kubenswrapper[4879]: I1125 15:56:55.049377 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" podStartSLOduration=3.049360376 podStartE2EDuration="3.049360376s" podCreationTimestamp="2025-11-25 15:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:56:55.039943699 +0000 UTC m=+5506.643356770" watchObservedRunningTime="2025-11-25 15:56:55.049360376 +0000 UTC m=+5506.652773447" Nov 25 15:56:58 crc kubenswrapper[4879]: I1125 15:56:58.049315 4879 generic.go:334] "Generic (PLEG): container finished" podID="f34c8d56-9756-469e-b656-5ca38c759d64" containerID="706f81d34201040112f6f101080b7d5ff912b4fe675930e89f303d9fa1e58895" exitCode=0 Nov 25 15:56:58 crc kubenswrapper[4879]: I1125 15:56:58.049419 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hht89" event={"ID":"f34c8d56-9756-469e-b656-5ca38c759d64","Type":"ContainerDied","Data":"706f81d34201040112f6f101080b7d5ff912b4fe675930e89f303d9fa1e58895"} Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.398534 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hht89" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504594 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hdhb\" (UniqueName: \"kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504720 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504782 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504801 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504870 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.504897 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys\") pod \"f34c8d56-9756-469e-b656-5ca38c759d64\" (UID: \"f34c8d56-9756-469e-b656-5ca38c759d64\") " Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.510041 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts" (OuterVolumeSpecName: "scripts") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.510058 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb" (OuterVolumeSpecName: "kube-api-access-6hdhb") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "kube-api-access-6hdhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.510392 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.510548 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.528234 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.528686 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data" (OuterVolumeSpecName: "config-data") pod "f34c8d56-9756-469e-b656-5ca38c759d64" (UID: "f34c8d56-9756-469e-b656-5ca38c759d64"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605910 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hdhb\" (UniqueName: \"kubernetes.io/projected/f34c8d56-9756-469e-b656-5ca38c759d64-kube-api-access-6hdhb\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605956 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605966 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605975 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605985 4879 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 15:56:59 crc kubenswrapper[4879]: I1125 15:56:59.605993 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f34c8d56-9756-469e-b656-5ca38c759d64-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.070779 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hht89" event={"ID":"f34c8d56-9756-469e-b656-5ca38c759d64","Type":"ContainerDied","Data":"dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206"} Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.070859 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-hht89" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.070883 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbd1786d844a26c8feef3b9ec1b83e5f244bb82e635c5804ae520e3ba27ca206" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.138347 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hht89"] Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.144703 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hht89"] Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.229900 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ctt6t"] Nov 25 15:57:00 crc kubenswrapper[4879]: E1125 15:57:00.230309 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f34c8d56-9756-469e-b656-5ca38c759d64" containerName="keystone-bootstrap" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.230330 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f34c8d56-9756-469e-b656-5ca38c759d64" containerName="keystone-bootstrap" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.230523 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f34c8d56-9756-469e-b656-5ca38c759d64" containerName="keystone-bootstrap" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.231190 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.233723 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.234193 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.234355 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.234556 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.234772 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fd7cl" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.247203 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ctt6t"] Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.421686 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.421781 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.421938 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wgl8\" (UniqueName: 
\"kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.422070 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.422209 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.422241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537113 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537228 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537307 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537393 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537584 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wgl8\" (UniqueName: \"kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.537887 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data\") pod 
\"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.544035 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.544141 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.544850 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.545023 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.546031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.556218 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wgl8\" (UniqueName: \"kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8\") pod \"keystone-bootstrap-ctt6t\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:00 crc kubenswrapper[4879]: I1125 15:57:00.850902 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:01 crc kubenswrapper[4879]: I1125 15:57:01.267833 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ctt6t"] Nov 25 15:57:01 crc kubenswrapper[4879]: W1125 15:57:01.271164 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb74a6428_86dc_4239_8792_65fcd245f5ce.slice/crio-06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51 WatchSource:0}: Error finding container 06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51: Status 404 returned error can't find the container with id 06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51 Nov 25 15:57:01 crc kubenswrapper[4879]: I1125 15:57:01.655007 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f34c8d56-9756-469e-b656-5ca38c759d64" path="/var/lib/kubelet/pods/f34c8d56-9756-469e-b656-5ca38c759d64/volumes" Nov 25 15:57:02 crc kubenswrapper[4879]: I1125 15:57:02.089797 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctt6t" event={"ID":"b74a6428-86dc-4239-8792-65fcd245f5ce","Type":"ContainerStarted","Data":"2e40c443f5be037996babfe2d680f7326ec514420ffdcd94c1458aea3a410bb2"} Nov 25 15:57:02 crc kubenswrapper[4879]: I1125 15:57:02.089850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctt6t" event={"ID":"b74a6428-86dc-4239-8792-65fcd245f5ce","Type":"ContainerStarted","Data":"06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51"} Nov 25 15:57:02 crc kubenswrapper[4879]: I1125 15:57:02.119615 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ctt6t" podStartSLOduration=2.119597279 podStartE2EDuration="2.119597279s" podCreationTimestamp="2025-11-25 15:57:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:57:02.112041105 +0000 UTC m=+5513.715454186" watchObservedRunningTime="2025-11-25 15:57:02.119597279 +0000 UTC m=+5513.723010350" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.273346 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.322422 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.322736 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="dnsmasq-dns" containerID="cri-o://0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835" gracePeriod=10 Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.777366 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.905005 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc\") pod \"717f3252-a021-43b1-ba4e-de174e6db38a\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.905110 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb\") pod \"717f3252-a021-43b1-ba4e-de174e6db38a\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.905158 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb\") pod \"717f3252-a021-43b1-ba4e-de174e6db38a\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.905363 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config\") pod \"717f3252-a021-43b1-ba4e-de174e6db38a\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.905394 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2vmb\" (UniqueName: \"kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb\") pod \"717f3252-a021-43b1-ba4e-de174e6db38a\" (UID: \"717f3252-a021-43b1-ba4e-de174e6db38a\") " Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.910814 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb" (OuterVolumeSpecName: "kube-api-access-n2vmb") pod "717f3252-a021-43b1-ba4e-de174e6db38a" (UID: "717f3252-a021-43b1-ba4e-de174e6db38a"). InnerVolumeSpecName "kube-api-access-n2vmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.960469 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "717f3252-a021-43b1-ba4e-de174e6db38a" (UID: "717f3252-a021-43b1-ba4e-de174e6db38a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.967665 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "717f3252-a021-43b1-ba4e-de174e6db38a" (UID: "717f3252-a021-43b1-ba4e-de174e6db38a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.969041 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "717f3252-a021-43b1-ba4e-de174e6db38a" (UID: "717f3252-a021-43b1-ba4e-de174e6db38a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:57:03 crc kubenswrapper[4879]: I1125 15:57:03.981389 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config" (OuterVolumeSpecName: "config") pod "717f3252-a021-43b1-ba4e-de174e6db38a" (UID: "717f3252-a021-43b1-ba4e-de174e6db38a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.007294 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.007326 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.007338 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.007347 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2vmb\" (UniqueName: \"kubernetes.io/projected/717f3252-a021-43b1-ba4e-de174e6db38a-kube-api-access-n2vmb\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.007358 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/717f3252-a021-43b1-ba4e-de174e6db38a-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.106662 4879 generic.go:334] "Generic (PLEG): container finished" podID="717f3252-a021-43b1-ba4e-de174e6db38a" containerID="0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835" exitCode=0 Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.106717 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" event={"ID":"717f3252-a021-43b1-ba4e-de174e6db38a","Type":"ContainerDied","Data":"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835"} Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.106749 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" event={"ID":"717f3252-a021-43b1-ba4e-de174e6db38a","Type":"ContainerDied","Data":"d3d3ee948eace5086c67419a71eb55b2dbfe4fcaad81fa192ae9c116bbaf9b82"} Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.106769 4879 scope.go:117] "RemoveContainer" containerID="0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.106927 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56467fbf9c-dtf95" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.130489 4879 scope.go:117] "RemoveContainer" containerID="fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.148430 4879 scope.go:117] "RemoveContainer" containerID="0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835" Nov 25 15:57:04 crc kubenswrapper[4879]: E1125 15:57:04.148805 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835\": container with ID starting with 0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835 not found: ID does not exist" containerID="0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.148834 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835"} err="failed to get container status \"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835\": rpc error: code = NotFound desc = could not find container \"0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835\": container with ID starting with 0c2e3031caa6d5795b3301613bb29fa829e0ee13e9cc10c565753aae92b81835 not found: ID does not exist" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.148891 4879 scope.go:117] "RemoveContainer" containerID="fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353" Nov 25 15:57:04 crc kubenswrapper[4879]: E1125 15:57:04.150050 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353\": container with ID starting with fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353 not found: ID does not exist" containerID="fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.150079 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353"} err="failed to get container status \"fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353\": rpc error: code = NotFound desc = could not find container \"fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353\": container with ID starting with fb6973ac18ec3a7b63590c73055731c9125946f9ed283907d694f6db82ebe353 not found: ID does not exist" Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.158841 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:57:04 crc kubenswrapper[4879]: I1125 15:57:04.167280 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56467fbf9c-dtf95"] Nov 25 15:57:05 crc kubenswrapper[4879]: I1125 15:57:05.121665 4879 generic.go:334] "Generic (PLEG): container finished" podID="b74a6428-86dc-4239-8792-65fcd245f5ce" containerID="2e40c443f5be037996babfe2d680f7326ec514420ffdcd94c1458aea3a410bb2" exitCode=0 Nov 25 15:57:05 crc kubenswrapper[4879]: I1125 15:57:05.121770 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ctt6t" 
event={"ID":"b74a6428-86dc-4239-8792-65fcd245f5ce","Type":"ContainerDied","Data":"2e40c443f5be037996babfe2d680f7326ec514420ffdcd94c1458aea3a410bb2"} Nov 25 15:57:05 crc kubenswrapper[4879]: I1125 15:57:05.655218 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" path="/var/lib/kubelet/pods/717f3252-a021-43b1-ba4e-de174e6db38a/volumes" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.421878 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449145 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449307 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wgl8\" (UniqueName: \"kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449411 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449456 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449481 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.449541 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys\") pod \"b74a6428-86dc-4239-8792-65fcd245f5ce\" (UID: \"b74a6428-86dc-4239-8792-65fcd245f5ce\") " Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.455441 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.455483 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts" (OuterVolumeSpecName: "scripts") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.456787 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.461498 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8" (OuterVolumeSpecName: "kube-api-access-8wgl8") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "kube-api-access-8wgl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.474791 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.480978 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data" (OuterVolumeSpecName: "config-data") pod "b74a6428-86dc-4239-8792-65fcd245f5ce" (UID: "b74a6428-86dc-4239-8792-65fcd245f5ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551619 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551665 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wgl8\" (UniqueName: \"kubernetes.io/projected/b74a6428-86dc-4239-8792-65fcd245f5ce-kube-api-access-8wgl8\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551680 4879 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551692 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551704 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:06 crc kubenswrapper[4879]: I1125 15:57:06.551715 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b74a6428-86dc-4239-8792-65fcd245f5ce-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.137556 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-ctt6t" event={"ID":"b74a6428-86dc-4239-8792-65fcd245f5ce","Type":"ContainerDied","Data":"06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51"} Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.137598 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ctt6t" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.137605 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06df4ace90b5815619806ed023a19157284414df8ec343e5934f53cf0219ac51" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.215918 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c5d6457d8-szl6w"] Nov 25 15:57:07 crc kubenswrapper[4879]: E1125 15:57:07.216279 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="init" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.216298 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="init" Nov 25 15:57:07 crc kubenswrapper[4879]: E1125 15:57:07.216325 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="dnsmasq-dns" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.216332 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="dnsmasq-dns" Nov 25 15:57:07 crc kubenswrapper[4879]: E1125 15:57:07.216345 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b74a6428-86dc-4239-8792-65fcd245f5ce" containerName="keystone-bootstrap" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.216351 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b74a6428-86dc-4239-8792-65fcd245f5ce" containerName="keystone-bootstrap" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.216530 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="717f3252-a021-43b1-ba4e-de174e6db38a" containerName="dnsmasq-dns" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.216552 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b74a6428-86dc-4239-8792-65fcd245f5ce" containerName="keystone-bootstrap" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.217104 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.219219 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.223346 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-fd7cl" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.223433 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.223685 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.235082 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c5d6457d8-szl6w"] Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.263903 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ggd6\" (UniqueName: \"kubernetes.io/projected/8937ae09-0ddc-4409-9419-dccc438b7ccb-kube-api-access-5ggd6\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.263997 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-credential-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.264047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-combined-ca-bundle\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.264092 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-fernet-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.264276 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-scripts\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.264487 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-config-data\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365363 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-config-data\") pod \"keystone-c5d6457d8-szl6w\" (UID: 
\"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365448 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ggd6\" (UniqueName: \"kubernetes.io/projected/8937ae09-0ddc-4409-9419-dccc438b7ccb-kube-api-access-5ggd6\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365481 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-credential-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365506 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-combined-ca-bundle\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365529 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-fernet-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.365558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-scripts\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.369493 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-config-data\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.369844 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-fernet-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.369943 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-combined-ca-bundle\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.372858 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-credential-keys\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.374227 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8937ae09-0ddc-4409-9419-dccc438b7ccb-scripts\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.386654 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ggd6\" (UniqueName: \"kubernetes.io/projected/8937ae09-0ddc-4409-9419-dccc438b7ccb-kube-api-access-5ggd6\") pod \"keystone-c5d6457d8-szl6w\" (UID: \"8937ae09-0ddc-4409-9419-dccc438b7ccb\") " pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:07 crc kubenswrapper[4879]: I1125 15:57:07.546688 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:08 crc kubenswrapper[4879]: I1125 15:57:08.011499 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c5d6457d8-szl6w"] Nov 25 15:57:08 crc kubenswrapper[4879]: I1125 15:57:08.146927 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c5d6457d8-szl6w" event={"ID":"8937ae09-0ddc-4409-9419-dccc438b7ccb","Type":"ContainerStarted","Data":"539b3f9ba2acdd90c78e542a4578c145d20058feca413ac7fce90b6b9dd6a9d2"} Nov 25 15:57:09 crc kubenswrapper[4879]: I1125 15:57:09.156253 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c5d6457d8-szl6w" event={"ID":"8937ae09-0ddc-4409-9419-dccc438b7ccb","Type":"ContainerStarted","Data":"7d28cd73937f8c5105539b7cb81afad9fef1507300784fa0fbe3a452aa98a106"} Nov 25 15:57:09 crc kubenswrapper[4879]: I1125 15:57:09.156572 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:17 crc kubenswrapper[4879]: I1125 15:57:17.408851 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:57:17 crc kubenswrapper[4879]: I1125 15:57:17.409401 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:57:39 crc kubenswrapper[4879]: I1125 15:57:39.136535 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-c5d6457d8-szl6w" Nov 25 15:57:39 crc kubenswrapper[4879]: I1125 15:57:39.156044 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-c5d6457d8-szl6w" podStartSLOduration=32.156027047 podStartE2EDuration="32.156027047s" podCreationTimestamp="2025-11-25 15:57:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:57:09.171395152 +0000 UTC m=+5520.774808253" watchObservedRunningTime="2025-11-25 15:57:39.156027047 +0000 UTC m=+5550.759440118" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.513796 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.515532 4879 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.518485 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.518510 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.518655 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-ckbcw" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.523339 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.652024 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5jtm\" (UniqueName: \"kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.652109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.652196 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.753372 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.753472 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5jtm\" (UniqueName: \"kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.753506 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.754550 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.759542 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.770101 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5jtm\" (UniqueName: \"kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm\") pod \"openstackclient\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " pod="openstack/openstackclient" Nov 25 15:57:42 crc kubenswrapper[4879]: I1125 15:57:42.837904 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 15:57:43 crc kubenswrapper[4879]: I1125 15:57:43.282781 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 15:57:43 crc kubenswrapper[4879]: I1125 15:57:43.429835 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4dc81ff1-2201-4b21-8b7c-74940b4a8b24","Type":"ContainerStarted","Data":"74d6b11b68490d478ae38aa0447d66224fe458297bfbdc913d1c9e110eeb83f9"} Nov 25 15:57:44 crc kubenswrapper[4879]: I1125 15:57:44.440146 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4dc81ff1-2201-4b21-8b7c-74940b4a8b24","Type":"ContainerStarted","Data":"5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18"} Nov 25 15:57:44 crc kubenswrapper[4879]: I1125 15:57:44.461424 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.461408456 podStartE2EDuration="2.461408456s" podCreationTimestamp="2025-11-25 15:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:57:44.454448249 +0000 UTC m=+5556.057861320" watchObservedRunningTime="2025-11-25 15:57:44.461408456 +0000 UTC m=+5556.064821517" Nov 25 15:57:47 crc kubenswrapper[4879]: I1125 15:57:47.409400 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:57:47 crc kubenswrapper[4879]: I1125 15:57:47.409936 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.408451 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.409059 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.409163 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.410237 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.410334 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a" gracePeriod=600 Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.736364 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a" exitCode=0 Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.736412 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a"} Nov 25 15:58:17 crc kubenswrapper[4879]: I1125 15:58:17.736769 4879 scope.go:117] "RemoveContainer" containerID="d05c3c108482b703e9e28bc5956d96eaed930a7c9e378c1d110074b856a14f38" Nov 25 15:58:18 crc kubenswrapper[4879]: I1125 15:58:18.759193 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f"} Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.741143 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-cl6z2"] Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.743379 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.771186 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-cl6z2"] Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.826835 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-3d30-account-create-vpdnx"] Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.827868 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.830389 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.843947 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3d30-account-create-vpdnx"] Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.934265 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts\") pod \"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.934609 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv9pt\" (UniqueName: \"kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.934786 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:21 crc kubenswrapper[4879]: I1125 15:59:21.934923 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rb87\" (UniqueName: \"kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87\") pod \"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.036587 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts\") pod \"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.036644 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv9pt\" (UniqueName: \"kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.036673 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.036708 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rb87\" (UniqueName: \"kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87\") pod 
\"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.037386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts\") pod \"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.037565 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.056027 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rb87\" (UniqueName: \"kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87\") pod \"barbican-3d30-account-create-vpdnx\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.056472 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv9pt\" (UniqueName: \"kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt\") pod \"barbican-db-create-cl6z2\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.067346 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.146539 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.531334 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-cl6z2"] Nov 25 15:59:22 crc kubenswrapper[4879]: I1125 15:59:22.623587 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3d30-account-create-vpdnx"] Nov 25 15:59:22 crc kubenswrapper[4879]: W1125 15:59:22.624453 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda08e55e8_641b_4464_82cd_9414c1b7dab0.slice/crio-f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386 WatchSource:0}: Error finding container f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386: Status 404 returned error can't find the container with id f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386 Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.286363 4879 generic.go:334] "Generic (PLEG): container finished" podID="422d9538-42a1-42bb-8940-0e92f8f7e665" containerID="d7bb95e3356be281ebd2b6906bb33a9f5078552ea2c8f08bfb96df92a1884f35" exitCode=0 Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.286533 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-cl6z2" event={"ID":"422d9538-42a1-42bb-8940-0e92f8f7e665","Type":"ContainerDied","Data":"d7bb95e3356be281ebd2b6906bb33a9f5078552ea2c8f08bfb96df92a1884f35"} Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.286590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-cl6z2" event={"ID":"422d9538-42a1-42bb-8940-0e92f8f7e665","Type":"ContainerStarted","Data":"29d1e4e1a917f5f0cc0fea964344e470761a5a92482979e8c3843507244e2d88"} Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.288855 4879 generic.go:334] "Generic (PLEG): container finished" podID="a08e55e8-641b-4464-82cd-9414c1b7dab0" containerID="084c0162e8174faf7d9c9ca6ce874de1290fb4ce4ea2b385a1e6ffe32527189e" exitCode=0 Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.288925 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d30-account-create-vpdnx" event={"ID":"a08e55e8-641b-4464-82cd-9414c1b7dab0","Type":"ContainerDied","Data":"084c0162e8174faf7d9c9ca6ce874de1290fb4ce4ea2b385a1e6ffe32527189e"} Nov 25 15:59:23 crc kubenswrapper[4879]: I1125 15:59:23.288979 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d30-account-create-vpdnx" event={"ID":"a08e55e8-641b-4464-82cd-9414c1b7dab0","Type":"ContainerStarted","Data":"f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386"} Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.647555 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.653467 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.785385 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rb87\" (UniqueName: \"kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87\") pod \"a08e55e8-641b-4464-82cd-9414c1b7dab0\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.785507 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts\") pod \"a08e55e8-641b-4464-82cd-9414c1b7dab0\" (UID: \"a08e55e8-641b-4464-82cd-9414c1b7dab0\") " Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.785583 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts\") pod \"422d9538-42a1-42bb-8940-0e92f8f7e665\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.785603 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv9pt\" (UniqueName: \"kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt\") pod \"422d9538-42a1-42bb-8940-0e92f8f7e665\" (UID: \"422d9538-42a1-42bb-8940-0e92f8f7e665\") " Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.786039 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a08e55e8-641b-4464-82cd-9414c1b7dab0" (UID: "a08e55e8-641b-4464-82cd-9414c1b7dab0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.786379 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "422d9538-42a1-42bb-8940-0e92f8f7e665" (UID: "422d9538-42a1-42bb-8940-0e92f8f7e665"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.791604 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87" (OuterVolumeSpecName: "kube-api-access-5rb87") pod "a08e55e8-641b-4464-82cd-9414c1b7dab0" (UID: "a08e55e8-641b-4464-82cd-9414c1b7dab0"). InnerVolumeSpecName "kube-api-access-5rb87". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.793073 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt" (OuterVolumeSpecName: "kube-api-access-mv9pt") pod "422d9538-42a1-42bb-8940-0e92f8f7e665" (UID: "422d9538-42a1-42bb-8940-0e92f8f7e665"). InnerVolumeSpecName "kube-api-access-mv9pt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.889271 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rb87\" (UniqueName: \"kubernetes.io/projected/a08e55e8-641b-4464-82cd-9414c1b7dab0-kube-api-access-5rb87\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.889319 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a08e55e8-641b-4464-82cd-9414c1b7dab0-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.889332 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/422d9538-42a1-42bb-8940-0e92f8f7e665-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:24 crc kubenswrapper[4879]: I1125 15:59:24.889343 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv9pt\" (UniqueName: \"kubernetes.io/projected/422d9538-42a1-42bb-8940-0e92f8f7e665-kube-api-access-mv9pt\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.306809 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3d30-account-create-vpdnx" Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.306790 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d30-account-create-vpdnx" event={"ID":"a08e55e8-641b-4464-82cd-9414c1b7dab0","Type":"ContainerDied","Data":"f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386"} Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.307090 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6e704cc08522d8516ce914258465613f4234b02202e653195bfd8038f210386" Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.308449 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-cl6z2" event={"ID":"422d9538-42a1-42bb-8940-0e92f8f7e665","Type":"ContainerDied","Data":"29d1e4e1a917f5f0cc0fea964344e470761a5a92482979e8c3843507244e2d88"} Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.308474 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29d1e4e1a917f5f0cc0fea964344e470761a5a92482979e8c3843507244e2d88" Nov 25 15:59:25 crc kubenswrapper[4879]: I1125 15:59:25.308506 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-cl6z2" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.176140 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-2hwt8"] Nov 25 15:59:27 crc kubenswrapper[4879]: E1125 15:59:27.177842 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="422d9538-42a1-42bb-8940-0e92f8f7e665" containerName="mariadb-database-create" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.177972 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="422d9538-42a1-42bb-8940-0e92f8f7e665" containerName="mariadb-database-create" Nov 25 15:59:27 crc kubenswrapper[4879]: E1125 15:59:27.178072 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a08e55e8-641b-4464-82cd-9414c1b7dab0" containerName="mariadb-account-create" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.178179 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a08e55e8-641b-4464-82cd-9414c1b7dab0" containerName="mariadb-account-create" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.178489 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="422d9538-42a1-42bb-8940-0e92f8f7e665" containerName="mariadb-database-create" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.178603 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a08e55e8-641b-4464-82cd-9414c1b7dab0" containerName="mariadb-account-create" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.179746 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.182773 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9kcrg" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.183134 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.190410 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2hwt8"] Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.226942 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.227347 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnzd7\" (UniqueName: \"kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.227481 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.328401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.328474 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnzd7\" (UniqueName: \"kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.328503 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.333848 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.337070 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.349800 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnzd7\" (UniqueName: \"kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7\") pod \"barbican-db-sync-2hwt8\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.503455 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:27 crc kubenswrapper[4879]: I1125 15:59:27.987240 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-2hwt8"] Nov 25 15:59:28 crc kubenswrapper[4879]: I1125 15:59:28.332404 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2hwt8" event={"ID":"072c3164-fa81-4e53-8f73-c0c303da55e6","Type":"ContainerStarted","Data":"d5603e49571c4f4afbb7b7c251e432a3fd0d37b2fbbdd7ec9b25c9a3ef25bd0a"} Nov 25 15:59:28 crc kubenswrapper[4879]: I1125 15:59:28.332735 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2hwt8" event={"ID":"072c3164-fa81-4e53-8f73-c0c303da55e6","Type":"ContainerStarted","Data":"324ae509c394ed90340b55bb8cc3b8ba3feeb2aa1c7dabf30a09ca713dafa786"} Nov 25 15:59:28 crc kubenswrapper[4879]: I1125 15:59:28.352994 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-2hwt8" podStartSLOduration=1.352969523 podStartE2EDuration="1.352969523s" podCreationTimestamp="2025-11-25 15:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:28.349819734 +0000 UTC m=+5659.953232805" watchObservedRunningTime="2025-11-25 15:59:28.352969523 +0000 UTC m=+5659.956382594" Nov 25 15:59:30 crc kubenswrapper[4879]: I1125 15:59:30.348474 4879 generic.go:334] "Generic (PLEG): container finished" podID="072c3164-fa81-4e53-8f73-c0c303da55e6" containerID="d5603e49571c4f4afbb7b7c251e432a3fd0d37b2fbbdd7ec9b25c9a3ef25bd0a" exitCode=0 Nov 25 15:59:30 crc kubenswrapper[4879]: I1125 15:59:30.348522 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2hwt8" event={"ID":"072c3164-fa81-4e53-8f73-c0c303da55e6","Type":"ContainerDied","Data":"d5603e49571c4f4afbb7b7c251e432a3fd0d37b2fbbdd7ec9b25c9a3ef25bd0a"} Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.639040 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.807171 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data\") pod \"072c3164-fa81-4e53-8f73-c0c303da55e6\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.807362 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnzd7\" (UniqueName: \"kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7\") pod \"072c3164-fa81-4e53-8f73-c0c303da55e6\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.807430 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle\") pod \"072c3164-fa81-4e53-8f73-c0c303da55e6\" (UID: \"072c3164-fa81-4e53-8f73-c0c303da55e6\") " Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.812756 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "072c3164-fa81-4e53-8f73-c0c303da55e6" (UID: "072c3164-fa81-4e53-8f73-c0c303da55e6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.812905 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7" (OuterVolumeSpecName: "kube-api-access-wnzd7") pod "072c3164-fa81-4e53-8f73-c0c303da55e6" (UID: "072c3164-fa81-4e53-8f73-c0c303da55e6"). InnerVolumeSpecName "kube-api-access-wnzd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.831110 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "072c3164-fa81-4e53-8f73-c0c303da55e6" (UID: "072c3164-fa81-4e53-8f73-c0c303da55e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.909159 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.909200 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/072c3164-fa81-4e53-8f73-c0c303da55e6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:31 crc kubenswrapper[4879]: I1125 15:59:31.909221 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnzd7\" (UniqueName: \"kubernetes.io/projected/072c3164-fa81-4e53-8f73-c0c303da55e6-kube-api-access-wnzd7\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.367159 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-2hwt8" event={"ID":"072c3164-fa81-4e53-8f73-c0c303da55e6","Type":"ContainerDied","Data":"324ae509c394ed90340b55bb8cc3b8ba3feeb2aa1c7dabf30a09ca713dafa786"} Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.367497 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="324ae509c394ed90340b55bb8cc3b8ba3feeb2aa1c7dabf30a09ca713dafa786" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.367312 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-2hwt8" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.427718 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:32 crc kubenswrapper[4879]: E1125 15:59:32.428204 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072c3164-fa81-4e53-8f73-c0c303da55e6" containerName="barbican-db-sync" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.428226 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="072c3164-fa81-4e53-8f73-c0c303da55e6" containerName="barbican-db-sync" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.428414 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="072c3164-fa81-4e53-8f73-c0c303da55e6" containerName="barbican-db-sync" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.429591 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.444419 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.521655 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz4rq\" (UniqueName: \"kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.521735 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.521758 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.598774 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b85ff7f6c-kfq49"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.600867 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.611669 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.611904 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-9kcrg" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.619346 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.625173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz4rq\" (UniqueName: \"kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.625260 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.625288 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.625879 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.631799 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.633345 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7c74ff6974-9qdrm"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.635154 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.639359 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.656295 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b85ff7f6c-kfq49"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.694783 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c74ff6974-9qdrm"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.701187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz4rq\" (UniqueName: \"kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq\") pod \"certified-operators-8wb8l\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.729738 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-combined-ca-bundle\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.729804 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de155fb1-303b-4959-a523-07f6f63f38f0-logs\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.729878 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pfsw\" (UniqueName: \"kubernetes.io/projected/de155fb1-303b-4959-a523-07f6f63f38f0-kube-api-access-6pfsw\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.729943 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data-custom\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.730027 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.748752 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831530 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q72z2\" (UniqueName: \"kubernetes.io/projected/331f36f8-4a95-4b48-a224-7366506b0b1f-kube-api-access-q72z2\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831580 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-combined-ca-bundle\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831612 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de155fb1-303b-4959-a523-07f6f63f38f0-logs\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831642 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/331f36f8-4a95-4b48-a224-7366506b0b1f-logs\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831693 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831713 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pfsw\" (UniqueName: \"kubernetes.io/projected/de155fb1-303b-4959-a523-07f6f63f38f0-kube-api-access-6pfsw\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831751 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data-custom\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831781 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data-custom\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831801 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-combined-ca-bundle\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.831973 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.832635 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.834381 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.835748 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de155fb1-303b-4959-a523-07f6f63f38f0-logs\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.849190 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.849751 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-config-data-custom\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.855069 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de155fb1-303b-4959-a523-07f6f63f38f0-combined-ca-bundle\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.857072 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.873916 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pfsw\" (UniqueName: \"kubernetes.io/projected/de155fb1-303b-4959-a523-07f6f63f38f0-kube-api-access-6pfsw\") pod \"barbican-worker-6b85ff7f6c-kfq49\" (UID: \"de155fb1-303b-4959-a523-07f6f63f38f0\") " pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.873987 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-584c9766db-7l7kz"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.875743 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.882545 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.896139 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-584c9766db-7l7kz"] Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944396 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/331f36f8-4a95-4b48-a224-7366506b0b1f-logs\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944474 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944500 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12429423-60c1-42f9-bb72-bdb45cdddd45-logs\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944532 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944567 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-combined-ca-bundle\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944599 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944623 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data-custom\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944645 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data-custom\") pod \"barbican-api-584c9766db-7l7kz\" (UID: 
\"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944681 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hzv2\" (UniqueName: \"kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944709 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjmlb\" (UniqueName: \"kubernetes.io/projected/12429423-60c1-42f9-bb72-bdb45cdddd45-kube-api-access-qjmlb\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944735 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944758 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-combined-ca-bundle\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944792 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944849 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.944924 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q72z2\" (UniqueName: \"kubernetes.io/projected/331f36f8-4a95-4b48-a224-7366506b0b1f-kube-api-access-q72z2\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.947870 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/331f36f8-4a95-4b48-a224-7366506b0b1f-logs\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.949890 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.965739 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-combined-ca-bundle\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.966702 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data-custom\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.969803 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/331f36f8-4a95-4b48-a224-7366506b0b1f-config-data\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.973690 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q72z2\" (UniqueName: \"kubernetes.io/projected/331f36f8-4a95-4b48-a224-7366506b0b1f-kube-api-access-q72z2\") pod \"barbican-keystone-listener-7c74ff6974-9qdrm\" (UID: \"331f36f8-4a95-4b48-a224-7366506b0b1f\") " pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:32 crc kubenswrapper[4879]: I1125 15:59:32.976293 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.048989 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-combined-ca-bundle\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049033 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049050 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data-custom\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049078 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hzv2\" (UniqueName: \"kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049105 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjmlb\" (UniqueName: \"kubernetes.io/projected/12429423-60c1-42f9-bb72-bdb45cdddd45-kube-api-access-qjmlb\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049145 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049168 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049212 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049302 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " 
pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.049319 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12429423-60c1-42f9-bb72-bdb45cdddd45-logs\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.056493 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.057304 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/12429423-60c1-42f9-bb72-bdb45cdddd45-logs\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.057956 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.059382 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.061898 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.071309 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-combined-ca-bundle\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.074095 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data-custom\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.078238 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12429423-60c1-42f9-bb72-bdb45cdddd45-config-data\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.079097 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-7hzv2\" (UniqueName: \"kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2\") pod \"dnsmasq-dns-6c6c8f9c5c-khbz6\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.093812 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjmlb\" (UniqueName: \"kubernetes.io/projected/12429423-60c1-42f9-bb72-bdb45cdddd45-kube-api-access-qjmlb\") pod \"barbican-api-584c9766db-7l7kz\" (UID: \"12429423-60c1-42f9-bb72-bdb45cdddd45\") " pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.246344 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.361623 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.397598 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.532516 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c74ff6974-9qdrm"] Nov 25 15:59:33 crc kubenswrapper[4879]: W1125 15:59:33.543395 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod331f36f8_4a95_4b48_a224_7366506b0b1f.slice/crio-6845f188cd985a0783bc3ae1922c97c2f0ebcb3c29eb8e5d90dc1922e507dd29 WatchSource:0}: Error finding container 6845f188cd985a0783bc3ae1922c97c2f0ebcb3c29eb8e5d90dc1922e507dd29: Status 404 returned error can't find the container with id 6845f188cd985a0783bc3ae1922c97c2f0ebcb3c29eb8e5d90dc1922e507dd29 Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.607843 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b85ff7f6c-kfq49"] Nov 25 15:59:33 crc kubenswrapper[4879]: W1125 15:59:33.622020 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde155fb1_303b_4959_a523_07f6f63f38f0.slice/crio-0715648b7e1bb756990a7a398586d0027486d365f9a9c6560f3cf82de0f21b86 WatchSource:0}: Error finding container 0715648b7e1bb756990a7a398586d0027486d365f9a9c6560f3cf82de0f21b86: Status 404 returned error can't find the container with id 0715648b7e1bb756990a7a398586d0027486d365f9a9c6560f3cf82de0f21b86 Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.842528 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 15:59:33 crc kubenswrapper[4879]: I1125 15:59:33.957778 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-584c9766db-7l7kz"] Nov 25 15:59:34 crc kubenswrapper[4879]: W1125 15:59:34.001346 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12429423_60c1_42f9_bb72_bdb45cdddd45.slice/crio-4985b9637dd315cf396fa4bb6e62be9e9c4e8166134b4b001f80e5261f737fd2 WatchSource:0}: Error finding container 4985b9637dd315cf396fa4bb6e62be9e9c4e8166134b4b001f80e5261f737fd2: Status 404 returned error can't find the container with id 4985b9637dd315cf396fa4bb6e62be9e9c4e8166134b4b001f80e5261f737fd2 Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 
15:59:34.399850 4879 generic.go:334] "Generic (PLEG): container finished" podID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerID="f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7" exitCode=0 Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.399942 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" event={"ID":"9f0651c9-9e94-4c89-9247-38d646047f5d","Type":"ContainerDied","Data":"f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.399978 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" event={"ID":"9f0651c9-9e94-4c89-9247-38d646047f5d","Type":"ContainerStarted","Data":"0327a69c7d08cd77eb70ceb0189a434b473be5f9ab0a1c966043f43494b6c360"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.402273 4879 generic.go:334] "Generic (PLEG): container finished" podID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerID="6d3a9dbb06fd505e52baf01563106e39d65f498ecdedb8a1b443add2667151fc" exitCode=0 Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.402362 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerDied","Data":"6d3a9dbb06fd505e52baf01563106e39d65f498ecdedb8a1b443add2667151fc"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.402397 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerStarted","Data":"1c55ccf718e73bcc4f621fb00e5718551589ef297aa9f9d95097241ad22af23a"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.404288 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" event={"ID":"331f36f8-4a95-4b48-a224-7366506b0b1f","Type":"ContainerStarted","Data":"6238812e94373b18fcaaa78d81db0feb1db42c434194a2cbfe7ffbb877c1cecf"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.404332 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" event={"ID":"331f36f8-4a95-4b48-a224-7366506b0b1f","Type":"ContainerStarted","Data":"27cd382a79a729c5f9b1db66d862a07fa77aec3f396cc8443ac541b54b0e9dd7"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.404342 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" event={"ID":"331f36f8-4a95-4b48-a224-7366506b0b1f","Type":"ContainerStarted","Data":"6845f188cd985a0783bc3ae1922c97c2f0ebcb3c29eb8e5d90dc1922e507dd29"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.404378 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.411227 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" event={"ID":"de155fb1-303b-4959-a523-07f6f63f38f0","Type":"ContainerStarted","Data":"5ccd120b844f3911e0000fcafe2ab58174df1e77b061a7b14009894191d2a4b3"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.411275 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" event={"ID":"de155fb1-303b-4959-a523-07f6f63f38f0","Type":"ContainerStarted","Data":"c0fa75152298907a18f68c5a6779c0a492e7833f2dc82fa941a9edff5cbc29d5"} Nov 25 15:59:34 crc 
kubenswrapper[4879]: I1125 15:59:34.411405 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" event={"ID":"de155fb1-303b-4959-a523-07f6f63f38f0","Type":"ContainerStarted","Data":"0715648b7e1bb756990a7a398586d0027486d365f9a9c6560f3cf82de0f21b86"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.420523 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-584c9766db-7l7kz" event={"ID":"12429423-60c1-42f9-bb72-bdb45cdddd45","Type":"ContainerStarted","Data":"09dc479007d72a7e7323b070ffb92a25fa5dd8d38f139c8ab62a317778b277df"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.420568 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-584c9766db-7l7kz" event={"ID":"12429423-60c1-42f9-bb72-bdb45cdddd45","Type":"ContainerStarted","Data":"27f0075da738c3f68d46b7133be442ed24db79fd12cdfeea4563c91638a65b73"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.420580 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-584c9766db-7l7kz" event={"ID":"12429423-60c1-42f9-bb72-bdb45cdddd45","Type":"ContainerStarted","Data":"4985b9637dd315cf396fa4bb6e62be9e9c4e8166134b4b001f80e5261f737fd2"} Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.421325 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.421353 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.510814 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b85ff7f6c-kfq49" podStartSLOduration=2.510795601 podStartE2EDuration="2.510795601s" podCreationTimestamp="2025-11-25 15:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:34.466482136 +0000 UTC m=+5666.069895217" watchObservedRunningTime="2025-11-25 15:59:34.510795601 +0000 UTC m=+5666.114208682" Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.533192 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-584c9766db-7l7kz" podStartSLOduration=2.5331710149999997 podStartE2EDuration="2.533171015s" podCreationTimestamp="2025-11-25 15:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:34.532454274 +0000 UTC m=+5666.135867345" watchObservedRunningTime="2025-11-25 15:59:34.533171015 +0000 UTC m=+5666.136584096" Nov 25 15:59:34 crc kubenswrapper[4879]: I1125 15:59:34.580029 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7c74ff6974-9qdrm" podStartSLOduration=2.580008971 podStartE2EDuration="2.580008971s" podCreationTimestamp="2025-11-25 15:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:34.572479718 +0000 UTC m=+5666.175892799" watchObservedRunningTime="2025-11-25 15:59:34.580008971 +0000 UTC m=+5666.183422042" Nov 25 15:59:35 crc kubenswrapper[4879]: I1125 15:59:35.430588 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" 
event={"ID":"9f0651c9-9e94-4c89-9247-38d646047f5d","Type":"ContainerStarted","Data":"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e"} Nov 25 15:59:35 crc kubenswrapper[4879]: I1125 15:59:35.431299 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:35 crc kubenswrapper[4879]: I1125 15:59:35.434476 4879 generic.go:334] "Generic (PLEG): container finished" podID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerID="c8cc55c21470afc6132e0129f83739e4899301ca25db2036aecde21ca12dbea9" exitCode=0 Nov 25 15:59:35 crc kubenswrapper[4879]: I1125 15:59:35.434612 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerDied","Data":"c8cc55c21470afc6132e0129f83739e4899301ca25db2036aecde21ca12dbea9"} Nov 25 15:59:35 crc kubenswrapper[4879]: I1125 15:59:35.456934 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" podStartSLOduration=3.456915432 podStartE2EDuration="3.456915432s" podCreationTimestamp="2025-11-25 15:59:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:35.452811306 +0000 UTC m=+5667.056224397" watchObservedRunningTime="2025-11-25 15:59:35.456915432 +0000 UTC m=+5667.060328503" Nov 25 15:59:36 crc kubenswrapper[4879]: I1125 15:59:36.448921 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerStarted","Data":"f56647f337944c9867483e8b3e67c5a28abfba185432e7043cd6c7773de312c8"} Nov 25 15:59:36 crc kubenswrapper[4879]: I1125 15:59:36.474039 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8wb8l" podStartSLOduration=3.043673281 podStartE2EDuration="4.474022313s" podCreationTimestamp="2025-11-25 15:59:32 +0000 UTC" firstStartedPulling="2025-11-25 15:59:34.404024218 +0000 UTC m=+5666.007437289" lastFinishedPulling="2025-11-25 15:59:35.83437325 +0000 UTC m=+5667.437786321" observedRunningTime="2025-11-25 15:59:36.465854251 +0000 UTC m=+5668.069267332" watchObservedRunningTime="2025-11-25 15:59:36.474022313 +0000 UTC m=+5668.077435384" Nov 25 15:59:42 crc kubenswrapper[4879]: I1125 15:59:42.754364 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:42 crc kubenswrapper[4879]: I1125 15:59:42.754943 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:42 crc kubenswrapper[4879]: I1125 15:59:42.807649 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.249460 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.327776 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.328049 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" 
podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="dnsmasq-dns" containerID="cri-o://64e2774bd6094460f1787c9823f885ab357af220d911aec0a6fde040062008cd" gracePeriod=10 Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.516052 4879 generic.go:334] "Generic (PLEG): container finished" podID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerID="64e2774bd6094460f1787c9823f885ab357af220d911aec0a6fde040062008cd" exitCode=0 Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.517022 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" event={"ID":"9a8e4590-eb09-4f47-9b2e-36946792bdd5","Type":"ContainerDied","Data":"64e2774bd6094460f1787c9823f885ab357af220d911aec0a6fde040062008cd"} Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.580594 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:43 crc kubenswrapper[4879]: I1125 15:59:43.925542 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.059775 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lldft\" (UniqueName: \"kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft\") pod \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.059943 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config\") pod \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.060053 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb\") pod \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.060148 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb\") pod \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.060207 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc\") pod \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\" (UID: \"9a8e4590-eb09-4f47-9b2e-36946792bdd5\") " Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.073063 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft" (OuterVolumeSpecName: "kube-api-access-lldft") pod "9a8e4590-eb09-4f47-9b2e-36946792bdd5" (UID: "9a8e4590-eb09-4f47-9b2e-36946792bdd5"). InnerVolumeSpecName "kube-api-access-lldft". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.103805 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9a8e4590-eb09-4f47-9b2e-36946792bdd5" (UID: "9a8e4590-eb09-4f47-9b2e-36946792bdd5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.104175 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9a8e4590-eb09-4f47-9b2e-36946792bdd5" (UID: "9a8e4590-eb09-4f47-9b2e-36946792bdd5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.108410 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config" (OuterVolumeSpecName: "config") pod "9a8e4590-eb09-4f47-9b2e-36946792bdd5" (UID: "9a8e4590-eb09-4f47-9b2e-36946792bdd5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.109925 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9a8e4590-eb09-4f47-9b2e-36946792bdd5" (UID: "9a8e4590-eb09-4f47-9b2e-36946792bdd5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.164621 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.164678 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.164691 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.164703 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lldft\" (UniqueName: \"kubernetes.io/projected/9a8e4590-eb09-4f47-9b2e-36946792bdd5-kube-api-access-lldft\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.164718 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a8e4590-eb09-4f47-9b2e-36946792bdd5-config\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.530821 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.536624 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bcbbddf7-jwnlk" event={"ID":"9a8e4590-eb09-4f47-9b2e-36946792bdd5","Type":"ContainerDied","Data":"f84f9032f322eeebc38a7251f874351d5d41d97e0fc954a885a3fa50ac72fa08"} Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.536741 4879 scope.go:117] "RemoveContainer" containerID="64e2774bd6094460f1787c9823f885ab357af220d911aec0a6fde040062008cd" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.578323 4879 scope.go:117] "RemoveContainer" containerID="5e1e663aa1c15afeee6b7a8ded3ace8f07ac11ca358ba44d2650668d51537e5f" Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.586908 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.594990 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bcbbddf7-jwnlk"] Nov 25 15:59:44 crc kubenswrapper[4879]: I1125 15:59:44.921946 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:45 crc kubenswrapper[4879]: I1125 15:59:45.055427 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-584c9766db-7l7kz" Nov 25 15:59:45 crc kubenswrapper[4879]: I1125 15:59:45.654204 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" path="/var/lib/kubelet/pods/9a8e4590-eb09-4f47-9b2e-36946792bdd5/volumes" Nov 25 15:59:47 crc kubenswrapper[4879]: I1125 15:59:46.905779 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:47 crc kubenswrapper[4879]: I1125 15:59:46.905995 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8wb8l" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="registry-server" containerID="cri-o://f56647f337944c9867483e8b3e67c5a28abfba185432e7043cd6c7773de312c8" gracePeriod=2 Nov 25 15:59:47 crc kubenswrapper[4879]: I1125 15:59:47.572432 4879 generic.go:334] "Generic (PLEG): container finished" podID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerID="f56647f337944c9867483e8b3e67c5a28abfba185432e7043cd6c7773de312c8" exitCode=0 Nov 25 15:59:47 crc kubenswrapper[4879]: I1125 15:59:47.572518 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerDied","Data":"f56647f337944c9867483e8b3e67c5a28abfba185432e7043cd6c7773de312c8"} Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.148604 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.264198 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities\") pod \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.264310 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz4rq\" (UniqueName: \"kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq\") pod \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.264369 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content\") pod \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\" (UID: \"de07c9d3-b7ed-4da0-892e-bb589f1fd35a\") " Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.265080 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities" (OuterVolumeSpecName: "utilities") pod "de07c9d3-b7ed-4da0-892e-bb589f1fd35a" (UID: "de07c9d3-b7ed-4da0-892e-bb589f1fd35a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.280908 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq" (OuterVolumeSpecName: "kube-api-access-xz4rq") pod "de07c9d3-b7ed-4da0-892e-bb589f1fd35a" (UID: "de07c9d3-b7ed-4da0-892e-bb589f1fd35a"). InnerVolumeSpecName "kube-api-access-xz4rq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.313091 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de07c9d3-b7ed-4da0-892e-bb589f1fd35a" (UID: "de07c9d3-b7ed-4da0-892e-bb589f1fd35a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.366308 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.366341 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz4rq\" (UniqueName: \"kubernetes.io/projected/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-kube-api-access-xz4rq\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.366352 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de07c9d3-b7ed-4da0-892e-bb589f1fd35a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.583760 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8wb8l" event={"ID":"de07c9d3-b7ed-4da0-892e-bb589f1fd35a","Type":"ContainerDied","Data":"1c55ccf718e73bcc4f621fb00e5718551589ef297aa9f9d95097241ad22af23a"} Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.583847 4879 scope.go:117] "RemoveContainer" containerID="f56647f337944c9867483e8b3e67c5a28abfba185432e7043cd6c7773de312c8" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.583905 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8wb8l" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.619457 4879 scope.go:117] "RemoveContainer" containerID="c8cc55c21470afc6132e0129f83739e4899301ca25db2036aecde21ca12dbea9" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.623203 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.638728 4879 scope.go:117] "RemoveContainer" containerID="6d3a9dbb06fd505e52baf01563106e39d65f498ecdedb8a1b443add2667151fc" Nov 25 15:59:48 crc kubenswrapper[4879]: I1125 15:59:48.642406 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8wb8l"] Nov 25 15:59:49 crc kubenswrapper[4879]: I1125 15:59:49.663780 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" path="/var/lib/kubelet/pods/de07c9d3-b7ed-4da0-892e-bb589f1fd35a/volumes" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.452457 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-md922"] Nov 25 15:59:56 crc kubenswrapper[4879]: E1125 15:59:56.453473 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="dnsmasq-dns" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453490 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="dnsmasq-dns" Nov 25 15:59:56 crc kubenswrapper[4879]: E1125 15:59:56.453513 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="registry-server" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453521 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="registry-server" Nov 25 15:59:56 crc kubenswrapper[4879]: E1125 15:59:56.453554 4879 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="init" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453562 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="init" Nov 25 15:59:56 crc kubenswrapper[4879]: E1125 15:59:56.453584 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="extract-utilities" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453592 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="extract-utilities" Nov 25 15:59:56 crc kubenswrapper[4879]: E1125 15:59:56.453603 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="extract-content" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453612 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="extract-content" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453804 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="de07c9d3-b7ed-4da0-892e-bb589f1fd35a" containerName="registry-server" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.453835 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a8e4590-eb09-4f47-9b2e-36946792bdd5" containerName="dnsmasq-dns" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.457865 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.463628 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-md922"] Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.553824 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-53eb-account-create-2jd8c"] Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.558181 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.561411 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.585368 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-53eb-account-create-2jd8c"] Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.611270 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts\") pod \"neutron-db-create-md922\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.611387 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqvjf\" (UniqueName: \"kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf\") pod \"neutron-db-create-md922\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.713070 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7c2m\" (UniqueName: \"kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.713169 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.713220 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts\") pod \"neutron-db-create-md922\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.713305 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqvjf\" (UniqueName: \"kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf\") pod \"neutron-db-create-md922\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.713932 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts\") pod \"neutron-db-create-md922\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.734649 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqvjf\" (UniqueName: \"kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf\") pod \"neutron-db-create-md922\" (UID: 
\"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.778156 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-md922" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.816183 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.816916 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7c2m\" (UniqueName: \"kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.817144 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.833952 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7c2m\" (UniqueName: \"kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m\") pod \"neutron-53eb-account-create-2jd8c\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:56 crc kubenswrapper[4879]: I1125 15:59:56.899842 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.230025 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-md922"] Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.355223 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-53eb-account-create-2jd8c"] Nov 25 15:59:57 crc kubenswrapper[4879]: W1125 15:59:57.360108 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a20f1c1_7078_41b2_a79f_bfad722b8f70.slice/crio-a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2 WatchSource:0}: Error finding container a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2: Status 404 returned error can't find the container with id a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2 Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.655762 4879 generic.go:334] "Generic (PLEG): container finished" podID="0fc8d596-9a9f-4e6b-925a-14d04a259781" containerID="3e340d3ed4e470268fc941715696f0a7cdd50f54533ddd573c8b694c61e86dae" exitCode=0 Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.655850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-md922" event={"ID":"0fc8d596-9a9f-4e6b-925a-14d04a259781","Type":"ContainerDied","Data":"3e340d3ed4e470268fc941715696f0a7cdd50f54533ddd573c8b694c61e86dae"} Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.656223 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-md922" event={"ID":"0fc8d596-9a9f-4e6b-925a-14d04a259781","Type":"ContainerStarted","Data":"12ed88763cf215bfc98c2a54f80279268cb08d6de146a9f81255702b84fad372"} Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.657976 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-53eb-account-create-2jd8c" event={"ID":"7a20f1c1-7078-41b2-a79f-bfad722b8f70","Type":"ContainerStarted","Data":"8fd5412b15d873c8b1171efaff172adf66703c189a7b1e222d8885297e240f99"} Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.658001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-53eb-account-create-2jd8c" event={"ID":"7a20f1c1-7078-41b2-a79f-bfad722b8f70","Type":"ContainerStarted","Data":"a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2"} Nov 25 15:59:57 crc kubenswrapper[4879]: I1125 15:59:57.687672 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-53eb-account-create-2jd8c" podStartSLOduration=1.6876513979999999 podStartE2EDuration="1.687651398s" podCreationTimestamp="2025-11-25 15:59:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 15:59:57.681059941 +0000 UTC m=+5689.284473012" watchObservedRunningTime="2025-11-25 15:59:57.687651398 +0000 UTC m=+5689.291064469" Nov 25 15:59:58 crc kubenswrapper[4879]: I1125 15:59:58.674863 4879 generic.go:334] "Generic (PLEG): container finished" podID="7a20f1c1-7078-41b2-a79f-bfad722b8f70" containerID="8fd5412b15d873c8b1171efaff172adf66703c189a7b1e222d8885297e240f99" exitCode=0 Nov 25 15:59:58 crc kubenswrapper[4879]: I1125 15:59:58.674952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-53eb-account-create-2jd8c" 
event={"ID":"7a20f1c1-7078-41b2-a79f-bfad722b8f70","Type":"ContainerDied","Data":"8fd5412b15d873c8b1171efaff172adf66703c189a7b1e222d8885297e240f99"} Nov 25 15:59:58 crc kubenswrapper[4879]: I1125 15:59:58.995622 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-md922" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.155305 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqvjf\" (UniqueName: \"kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf\") pod \"0fc8d596-9a9f-4e6b-925a-14d04a259781\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.155514 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts\") pod \"0fc8d596-9a9f-4e6b-925a-14d04a259781\" (UID: \"0fc8d596-9a9f-4e6b-925a-14d04a259781\") " Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.156218 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fc8d596-9a9f-4e6b-925a-14d04a259781" (UID: "0fc8d596-9a9f-4e6b-925a-14d04a259781"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.163922 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf" (OuterVolumeSpecName: "kube-api-access-fqvjf") pod "0fc8d596-9a9f-4e6b-925a-14d04a259781" (UID: "0fc8d596-9a9f-4e6b-925a-14d04a259781"). InnerVolumeSpecName "kube-api-access-fqvjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.258074 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fc8d596-9a9f-4e6b-925a-14d04a259781-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.258150 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqvjf\" (UniqueName: \"kubernetes.io/projected/0fc8d596-9a9f-4e6b-925a-14d04a259781-kube-api-access-fqvjf\") on node \"crc\" DevicePath \"\"" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.686067 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-md922" Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.686064 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-md922" event={"ID":"0fc8d596-9a9f-4e6b-925a-14d04a259781","Type":"ContainerDied","Data":"12ed88763cf215bfc98c2a54f80279268cb08d6de146a9f81255702b84fad372"} Nov 25 15:59:59 crc kubenswrapper[4879]: I1125 15:59:59.686149 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12ed88763cf215bfc98c2a54f80279268cb08d6de146a9f81255702b84fad372" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.100154 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.149542 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4"] Nov 25 16:00:00 crc kubenswrapper[4879]: E1125 16:00:00.149992 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a20f1c1-7078-41b2-a79f-bfad722b8f70" containerName="mariadb-account-create" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.150024 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a20f1c1-7078-41b2-a79f-bfad722b8f70" containerName="mariadb-account-create" Nov 25 16:00:00 crc kubenswrapper[4879]: E1125 16:00:00.150052 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc8d596-9a9f-4e6b-925a-14d04a259781" containerName="mariadb-database-create" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.150058 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc8d596-9a9f-4e6b-925a-14d04a259781" containerName="mariadb-database-create" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.150273 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a20f1c1-7078-41b2-a79f-bfad722b8f70" containerName="mariadb-account-create" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.150291 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc8d596-9a9f-4e6b-925a-14d04a259781" containerName="mariadb-database-create" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.150915 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.154445 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.154833 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.165421 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4"] Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.275837 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7c2m\" (UniqueName: \"kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m\") pod \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.275994 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts\") pod \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\" (UID: \"7a20f1c1-7078-41b2-a79f-bfad722b8f70\") " Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.276356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jknt5\" (UniqueName: \"kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 
16:00:00.276395 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.276448 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.276692 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a20f1c1-7078-41b2-a79f-bfad722b8f70" (UID: "7a20f1c1-7078-41b2-a79f-bfad722b8f70"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.280236 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m" (OuterVolumeSpecName: "kube-api-access-s7c2m") pod "7a20f1c1-7078-41b2-a79f-bfad722b8f70" (UID: "7a20f1c1-7078-41b2-a79f-bfad722b8f70"). InnerVolumeSpecName "kube-api-access-s7c2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.378210 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.378347 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jknt5\" (UniqueName: \"kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.378375 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.378444 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7c2m\" (UniqueName: \"kubernetes.io/projected/7a20f1c1-7078-41b2-a79f-bfad722b8f70-kube-api-access-s7c2m\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.378480 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20f1c1-7078-41b2-a79f-bfad722b8f70-operator-scripts\") on node \"crc\" 
DevicePath \"\"" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.379411 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.383202 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.404113 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jknt5\" (UniqueName: \"kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5\") pod \"collect-profiles-29401440-j5ch4\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.471947 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.707425 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-53eb-account-create-2jd8c" event={"ID":"7a20f1c1-7078-41b2-a79f-bfad722b8f70","Type":"ContainerDied","Data":"a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2"} Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.707820 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a91bd00f2a1b84a009fd6bff5c9ce2b623aebbd342ec4b2fd337105105b09cc2" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.707708 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-53eb-account-create-2jd8c" Nov 25 16:00:00 crc kubenswrapper[4879]: I1125 16:00:00.912937 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4"] Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.719742 4879 generic.go:334] "Generic (PLEG): container finished" podID="5e3d9297-b45d-43a2-8169-4b5b4b7965dc" containerID="ab1e87d9f02145d94806aa3e63257cf261ef051cf745e513afafd69ba4af1a9d" exitCode=0 Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.719871 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" event={"ID":"5e3d9297-b45d-43a2-8169-4b5b4b7965dc","Type":"ContainerDied","Data":"ab1e87d9f02145d94806aa3e63257cf261ef051cf745e513afafd69ba4af1a9d"} Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.720087 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" event={"ID":"5e3d9297-b45d-43a2-8169-4b5b4b7965dc","Type":"ContainerStarted","Data":"1a6905a70195dcc9ead38ff35d2897f9ef888ce0799596d6653cb869ba7c1a64"} Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.942945 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-2tc2v"] Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.944312 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.947348 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.950095 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.951798 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4rbqh" Nov 25 16:00:01 crc kubenswrapper[4879]: I1125 16:00:01.952773 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2tc2v"] Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.006436 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4l27\" (UniqueName: \"kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.007675 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.007953 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.110942 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4l27\" 
(UniqueName: \"kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.111066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.111186 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.119637 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.119848 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.129041 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4l27\" (UniqueName: \"kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27\") pod \"neutron-db-sync-2tc2v\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.262401 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.696143 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2tc2v"] Nov 25 16:00:02 crc kubenswrapper[4879]: W1125 16:00:02.698350 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice/crio-4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644 WatchSource:0}: Error finding container 4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644: Status 404 returned error can't find the container with id 4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644 Nov 25 16:00:02 crc kubenswrapper[4879]: I1125 16:00:02.732053 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2tc2v" event={"ID":"64721e41-24b7-4492-825b-f25a899324f2","Type":"ContainerStarted","Data":"4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644"} Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.004017 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.132879 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume\") pod \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.132974 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jknt5\" (UniqueName: \"kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5\") pod \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.133026 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume\") pod \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\" (UID: \"5e3d9297-b45d-43a2-8169-4b5b4b7965dc\") " Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.134324 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume" (OuterVolumeSpecName: "config-volume") pod "5e3d9297-b45d-43a2-8169-4b5b4b7965dc" (UID: "5e3d9297-b45d-43a2-8169-4b5b4b7965dc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.144271 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5" (OuterVolumeSpecName: "kube-api-access-jknt5") pod "5e3d9297-b45d-43a2-8169-4b5b4b7965dc" (UID: "5e3d9297-b45d-43a2-8169-4b5b4b7965dc"). InnerVolumeSpecName "kube-api-access-jknt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.144410 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5e3d9297-b45d-43a2-8169-4b5b4b7965dc" (UID: "5e3d9297-b45d-43a2-8169-4b5b4b7965dc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.234892 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jknt5\" (UniqueName: \"kubernetes.io/projected/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-kube-api-access-jknt5\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.234938 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.234952 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5e3d9297-b45d-43a2-8169-4b5b4b7965dc-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.745536 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2tc2v" event={"ID":"64721e41-24b7-4492-825b-f25a899324f2","Type":"ContainerStarted","Data":"9d7af6e22b3298dbd6185ae66e5eec9b6f504a46a96dd60d6986d7bd10fbbcaa"} Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.748897 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" event={"ID":"5e3d9297-b45d-43a2-8169-4b5b4b7965dc","Type":"ContainerDied","Data":"1a6905a70195dcc9ead38ff35d2897f9ef888ce0799596d6653cb869ba7c1a64"} Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.748971 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a6905a70195dcc9ead38ff35d2897f9ef888ce0799596d6653cb869ba7c1a64" Nov 25 16:00:03 crc kubenswrapper[4879]: I1125 16:00:03.749049 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4" Nov 25 16:00:04 crc kubenswrapper[4879]: I1125 16:00:04.047050 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-2tc2v" podStartSLOduration=3.047024952 podStartE2EDuration="3.047024952s" podCreationTimestamp="2025-11-25 16:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:00:03.770908103 +0000 UTC m=+5695.374321184" watchObservedRunningTime="2025-11-25 16:00:04.047024952 +0000 UTC m=+5695.650438023" Nov 25 16:00:04 crc kubenswrapper[4879]: I1125 16:00:04.106855 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt"] Nov 25 16:00:04 crc kubenswrapper[4879]: I1125 16:00:04.130530 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401395-rzrqt"] Nov 25 16:00:05 crc kubenswrapper[4879]: I1125 16:00:05.658385 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6120e86e-3bfb-4c54-a197-86517ce67e78" path="/var/lib/kubelet/pods/6120e86e-3bfb-4c54-a197-86517ce67e78/volumes" Nov 25 16:00:07 crc kubenswrapper[4879]: I1125 16:00:07.786349 4879 generic.go:334] "Generic (PLEG): container finished" podID="64721e41-24b7-4492-825b-f25a899324f2" containerID="9d7af6e22b3298dbd6185ae66e5eec9b6f504a46a96dd60d6986d7bd10fbbcaa" exitCode=0 Nov 25 16:00:07 crc kubenswrapper[4879]: I1125 16:00:07.786461 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2tc2v" event={"ID":"64721e41-24b7-4492-825b-f25a899324f2","Type":"ContainerDied","Data":"9d7af6e22b3298dbd6185ae66e5eec9b6f504a46a96dd60d6986d7bd10fbbcaa"} Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.106759 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.244969 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4l27\" (UniqueName: \"kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27\") pod \"64721e41-24b7-4492-825b-f25a899324f2\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.245049 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config\") pod \"64721e41-24b7-4492-825b-f25a899324f2\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.245092 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle\") pod \"64721e41-24b7-4492-825b-f25a899324f2\" (UID: \"64721e41-24b7-4492-825b-f25a899324f2\") " Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.251266 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27" (OuterVolumeSpecName: "kube-api-access-l4l27") pod "64721e41-24b7-4492-825b-f25a899324f2" (UID: "64721e41-24b7-4492-825b-f25a899324f2"). InnerVolumeSpecName "kube-api-access-l4l27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.269508 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config" (OuterVolumeSpecName: "config") pod "64721e41-24b7-4492-825b-f25a899324f2" (UID: "64721e41-24b7-4492-825b-f25a899324f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.269599 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64721e41-24b7-4492-825b-f25a899324f2" (UID: "64721e41-24b7-4492-825b-f25a899324f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.347053 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4l27\" (UniqueName: \"kubernetes.io/projected/64721e41-24b7-4492-825b-f25a899324f2-kube-api-access-l4l27\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.347084 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.347096 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64721e41-24b7-4492-825b-f25a899324f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.805467 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2tc2v" event={"ID":"64721e41-24b7-4492-825b-f25a899324f2","Type":"ContainerDied","Data":"4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644"} Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.806037 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b8560011da3921a31b4b83cb645045829d40be62be59963aac34ee1596d7644" Nov 25 16:00:09 crc kubenswrapper[4879]: I1125 16:00:09.805638 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-2tc2v" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.054340 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:00:10 crc kubenswrapper[4879]: E1125 16:00:10.054851 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3d9297-b45d-43a2-8169-4b5b4b7965dc" containerName="collect-profiles" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.054877 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3d9297-b45d-43a2-8169-4b5b4b7965dc" containerName="collect-profiles" Nov 25 16:00:10 crc kubenswrapper[4879]: E1125 16:00:10.054900 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64721e41-24b7-4492-825b-f25a899324f2" containerName="neutron-db-sync" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.054909 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="64721e41-24b7-4492-825b-f25a899324f2" containerName="neutron-db-sync" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.055300 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3d9297-b45d-43a2-8169-4b5b4b7965dc" containerName="collect-profiles" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.059301 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="64721e41-24b7-4492-825b-f25a899324f2" containerName="neutron-db-sync" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.060733 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.076391 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.156236 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5d677fbdcc-5vh2w"] Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.157709 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.159050 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.159150 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.159195 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.159248 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.159381 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svcs5\" (UniqueName: \"kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.162987 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.163111 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4rbqh" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.162945 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.174972 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d677fbdcc-5vh2w"] Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.263671 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.263744 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.263811 
4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.263908 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.264074 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svcs5\" (UniqueName: \"kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.264140 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n988\" (UniqueName: \"kubernetes.io/projected/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-kube-api-access-4n988\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.264222 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-combined-ca-bundle\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.264249 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.264278 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-httpd-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.265294 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.265459 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.265523 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.265624 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.282931 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svcs5\" (UniqueName: \"kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5\") pod \"dnsmasq-dns-b55c79499-8f2s5\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.366205 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n988\" (UniqueName: \"kubernetes.io/projected/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-kube-api-access-4n988\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.366268 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-combined-ca-bundle\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.366292 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-httpd-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.366340 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.370866 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-httpd-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.371899 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-combined-ca-bundle\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.374110 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-config\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: 
\"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.377919 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.387829 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n988\" (UniqueName: \"kubernetes.io/projected/871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a-kube-api-access-4n988\") pod \"neutron-5d677fbdcc-5vh2w\" (UID: \"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a\") " pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.490210 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.780835 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:00:10 crc kubenswrapper[4879]: I1125 16:00:10.817910 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" event={"ID":"4eca7f50-07c5-49a2-b120-9643e129234b","Type":"ContainerStarted","Data":"98d1a7cf9513658eaf8f80dc5dc782e6256d3a7b4b26800cae194e9e70dcfb18"} Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.237027 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5d677fbdcc-5vh2w"] Nov 25 16:00:11 crc kubenswrapper[4879]: W1125 16:00:11.238176 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod871a36ed_c8f0_4ad0_b8b7_0bb44f467c7a.slice/crio-85e2b29e6da0025e3b56260b8d623750a99b27aabafac0d0d24133f5f1e28cbf WatchSource:0}: Error finding container 85e2b29e6da0025e3b56260b8d623750a99b27aabafac0d0d24133f5f1e28cbf: Status 404 returned error can't find the container with id 85e2b29e6da0025e3b56260b8d623750a99b27aabafac0d0d24133f5f1e28cbf Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.827717 4879 generic.go:334] "Generic (PLEG): container finished" podID="4eca7f50-07c5-49a2-b120-9643e129234b" containerID="2b5b729238be0f38a8a0678d844458c0c97569f89e7e50b87d80e2173326a475" exitCode=0 Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.827813 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" event={"ID":"4eca7f50-07c5-49a2-b120-9643e129234b","Type":"ContainerDied","Data":"2b5b729238be0f38a8a0678d844458c0c97569f89e7e50b87d80e2173326a475"} Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.834795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d677fbdcc-5vh2w" event={"ID":"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a","Type":"ContainerStarted","Data":"7760ef1cd3738a5d07d0cc438f3eb567dd6924ecec256589500ab083e1ebbfd0"} Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.834855 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d677fbdcc-5vh2w" event={"ID":"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a","Type":"ContainerStarted","Data":"111785ba8427660dd248d3d0ebcba092910142d559d85b011bcdd5478819fb20"} Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.834868 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5d677fbdcc-5vh2w" event={"ID":"871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a","Type":"ContainerStarted","Data":"85e2b29e6da0025e3b56260b8d623750a99b27aabafac0d0d24133f5f1e28cbf"} Nov 25 16:00:11 crc kubenswrapper[4879]: 
I1125 16:00:11.835324 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:11 crc kubenswrapper[4879]: I1125 16:00:11.875809 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5d677fbdcc-5vh2w" podStartSLOduration=1.875788705 podStartE2EDuration="1.875788705s" podCreationTimestamp="2025-11-25 16:00:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:00:11.874513871 +0000 UTC m=+5703.477926952" watchObservedRunningTime="2025-11-25 16:00:11.875788705 +0000 UTC m=+5703.479201776" Nov 25 16:00:12 crc kubenswrapper[4879]: I1125 16:00:12.844937 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" event={"ID":"4eca7f50-07c5-49a2-b120-9643e129234b","Type":"ContainerStarted","Data":"9bdb997481673af4eeb5e3b79066f6fa8e46f62d87f822558bc8c0e760b00b62"} Nov 25 16:00:12 crc kubenswrapper[4879]: I1125 16:00:12.864010 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" podStartSLOduration=2.863995187 podStartE2EDuration="2.863995187s" podCreationTimestamp="2025-11-25 16:00:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:00:12.863874674 +0000 UTC m=+5704.467287755" watchObservedRunningTime="2025-11-25 16:00:12.863995187 +0000 UTC m=+5704.467408258" Nov 25 16:00:13 crc kubenswrapper[4879]: I1125 16:00:13.854459 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:17 crc kubenswrapper[4879]: I1125 16:00:17.409137 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:00:17 crc kubenswrapper[4879]: I1125 16:00:17.409437 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:00:17 crc kubenswrapper[4879]: E1125 16:00:17.974572 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:00:20 crc kubenswrapper[4879]: I1125 16:00:20.379259 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:00:20 crc kubenswrapper[4879]: I1125 16:00:20.433325 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 16:00:20 crc kubenswrapper[4879]: I1125 16:00:20.433572 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="dnsmasq-dns" containerID="cri-o://2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e" gracePeriod=10 Nov 25 
16:00:20 crc kubenswrapper[4879]: I1125 16:00:20.922660 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.061292 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config\") pod \"9f0651c9-9e94-4c89-9247-38d646047f5d\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.061384 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hzv2\" (UniqueName: \"kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2\") pod \"9f0651c9-9e94-4c89-9247-38d646047f5d\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.061466 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc\") pod \"9f0651c9-9e94-4c89-9247-38d646047f5d\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.061487 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb\") pod \"9f0651c9-9e94-4c89-9247-38d646047f5d\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.062100 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb\") pod \"9f0651c9-9e94-4c89-9247-38d646047f5d\" (UID: \"9f0651c9-9e94-4c89-9247-38d646047f5d\") " Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.067772 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2" (OuterVolumeSpecName: "kube-api-access-7hzv2") pod "9f0651c9-9e94-4c89-9247-38d646047f5d" (UID: "9f0651c9-9e94-4c89-9247-38d646047f5d"). InnerVolumeSpecName "kube-api-access-7hzv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.106002 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f0651c9-9e94-4c89-9247-38d646047f5d" (UID: "9f0651c9-9e94-4c89-9247-38d646047f5d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.106888 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config" (OuterVolumeSpecName: "config") pod "9f0651c9-9e94-4c89-9247-38d646047f5d" (UID: "9f0651c9-9e94-4c89-9247-38d646047f5d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.108580 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f0651c9-9e94-4c89-9247-38d646047f5d" (UID: "9f0651c9-9e94-4c89-9247-38d646047f5d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.112507 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f0651c9-9e94-4c89-9247-38d646047f5d" (UID: "9f0651c9-9e94-4c89-9247-38d646047f5d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.164643 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hzv2\" (UniqueName: \"kubernetes.io/projected/9f0651c9-9e94-4c89-9247-38d646047f5d-kube-api-access-7hzv2\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.164698 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.164712 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.164727 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.164740 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f0651c9-9e94-4c89-9247-38d646047f5d-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.206047 4879 generic.go:334] "Generic (PLEG): container finished" podID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerID="2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e" exitCode=0 Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.206095 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.206148 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" event={"ID":"9f0651c9-9e94-4c89-9247-38d646047f5d","Type":"ContainerDied","Data":"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e"} Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.206255 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c8f9c5c-khbz6" event={"ID":"9f0651c9-9e94-4c89-9247-38d646047f5d","Type":"ContainerDied","Data":"0327a69c7d08cd77eb70ceb0189a434b473be5f9ab0a1c966043f43494b6c360"} Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.207199 4879 scope.go:117] "RemoveContainer" containerID="2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.228328 4879 scope.go:117] "RemoveContainer" containerID="f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.271259 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.279865 4879 scope.go:117] "RemoveContainer" containerID="2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.279806 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6c8f9c5c-khbz6"] Nov 25 16:00:21 crc kubenswrapper[4879]: E1125 16:00:21.282109 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e\": container with ID starting with 2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e not found: ID does not exist" containerID="2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.282159 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e"} err="failed to get container status \"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e\": rpc error: code = NotFound desc = could not find container \"2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e\": container with ID starting with 2b8501fafdb5601059338632bbeff4bd6cc4d5278a7b2863310b115e8683020e not found: ID does not exist" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.282186 4879 scope.go:117] "RemoveContainer" containerID="f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7" Nov 25 16:00:21 crc kubenswrapper[4879]: E1125 16:00:21.282660 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7\": container with ID starting with f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7 not found: ID does not exist" containerID="f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.282702 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7"} err="failed to get container status 
\"f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7\": rpc error: code = NotFound desc = could not find container \"f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7\": container with ID starting with f3b80424a3814ced6e50772f0cc1a74fc3b551b6696b48a5fd2e6091de6524c7 not found: ID does not exist" Nov 25 16:00:21 crc kubenswrapper[4879]: I1125 16:00:21.656652 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" path="/var/lib/kubelet/pods/9f0651c9-9e94-4c89-9247-38d646047f5d/volumes" Nov 25 16:00:28 crc kubenswrapper[4879]: E1125 16:00:28.171701 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:00:38 crc kubenswrapper[4879]: E1125 16:00:38.380917 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:00:40 crc kubenswrapper[4879]: I1125 16:00:40.501922 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5d677fbdcc-5vh2w" Nov 25 16:00:43 crc kubenswrapper[4879]: I1125 16:00:43.385307 4879 scope.go:117] "RemoveContainer" containerID="f8d19dc2d3bcf3d4780d842233ea695dc8c6ca07772ea232477e3e9c41c291df" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.409028 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.410119 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.975854 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-tnxpp"] Nov 25 16:00:47 crc kubenswrapper[4879]: E1125 16:00:47.976218 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="dnsmasq-dns" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.976231 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="dnsmasq-dns" Nov 25 16:00:47 crc kubenswrapper[4879]: E1125 16:00:47.976254 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="init" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.976262 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="init" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.976440 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f0651c9-9e94-4c89-9247-38d646047f5d" containerName="dnsmasq-dns" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.978705 4879 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:47 crc kubenswrapper[4879]: I1125 16:00:47.988369 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tnxpp"] Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.033697 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nrwc\" (UniqueName: \"kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.033774 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.045691 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-e0b3-account-create-6xh9f"] Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.047599 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.049758 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.053068 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-e0b3-account-create-6xh9f"] Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.137991 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.138063 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nrwc\" (UniqueName: \"kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.138093 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.138189 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc6s8\" (UniqueName: \"kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.139228 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.155523 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nrwc\" (UniqueName: \"kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc\") pod \"glance-db-create-tnxpp\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.239382 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.239711 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc6s8\" (UniqueName: \"kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.240290 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.259432 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc6s8\" (UniqueName: \"kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8\") pod \"glance-e0b3-account-create-6xh9f\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.320681 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.364369 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:48 crc kubenswrapper[4879]: E1125 16:00:48.620690 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.669227 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tnxpp"] Nov 25 16:00:48 crc kubenswrapper[4879]: I1125 16:00:48.960625 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-e0b3-account-create-6xh9f"] Nov 25 16:00:48 crc kubenswrapper[4879]: W1125 16:00:48.962238 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a565eaa_22b7_4c01_a928_1f47d507d05c.slice/crio-b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48 WatchSource:0}: Error finding container b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48: Status 404 returned error can't find the container with id b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48 Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.475665 4879 generic.go:334] "Generic (PLEG): container finished" podID="a66d36f1-0990-49b9-acbb-70adf1b0567d" containerID="640b4a564a454df72f410b090d73a51aee71874ab7b9e5edbab39feebe993720" exitCode=0 Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.475755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tnxpp" event={"ID":"a66d36f1-0990-49b9-acbb-70adf1b0567d","Type":"ContainerDied","Data":"640b4a564a454df72f410b090d73a51aee71874ab7b9e5edbab39feebe993720"} Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.475794 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tnxpp" event={"ID":"a66d36f1-0990-49b9-acbb-70adf1b0567d","Type":"ContainerStarted","Data":"c643e4f14de3e6bd5350436d7f6adbe74383ce9643fd639d26cddc2871617310"} Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.477715 4879 generic.go:334] "Generic (PLEG): container finished" podID="1a565eaa-22b7-4c01-a928-1f47d507d05c" containerID="135a0c5d0f70e788a68d3b450162be7a3dfa02f4c0f2ff410c6332e24a77ea3c" exitCode=0 Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.477759 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e0b3-account-create-6xh9f" event={"ID":"1a565eaa-22b7-4c01-a928-1f47d507d05c","Type":"ContainerDied","Data":"135a0c5d0f70e788a68d3b450162be7a3dfa02f4c0f2ff410c6332e24a77ea3c"} Nov 25 16:00:49 crc kubenswrapper[4879]: I1125 16:00:49.477787 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e0b3-account-create-6xh9f" event={"ID":"1a565eaa-22b7-4c01-a928-1f47d507d05c","Type":"ContainerStarted","Data":"b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48"} Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.779335 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.888981 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts\") pod \"1a565eaa-22b7-4c01-a928-1f47d507d05c\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.889059 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc6s8\" (UniqueName: \"kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8\") pod \"1a565eaa-22b7-4c01-a928-1f47d507d05c\" (UID: \"1a565eaa-22b7-4c01-a928-1f47d507d05c\") " Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.889913 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a565eaa-22b7-4c01-a928-1f47d507d05c" (UID: "1a565eaa-22b7-4c01-a928-1f47d507d05c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.890798 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.895886 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8" (OuterVolumeSpecName: "kube-api-access-kc6s8") pod "1a565eaa-22b7-4c01-a928-1f47d507d05c" (UID: "1a565eaa-22b7-4c01-a928-1f47d507d05c"). InnerVolumeSpecName "kube-api-access-kc6s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.990402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts\") pod \"a66d36f1-0990-49b9-acbb-70adf1b0567d\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.990467 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nrwc\" (UniqueName: \"kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc\") pod \"a66d36f1-0990-49b9-acbb-70adf1b0567d\" (UID: \"a66d36f1-0990-49b9-acbb-70adf1b0567d\") " Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.990917 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc6s8\" (UniqueName: \"kubernetes.io/projected/1a565eaa-22b7-4c01-a928-1f47d507d05c-kube-api-access-kc6s8\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.990941 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a565eaa-22b7-4c01-a928-1f47d507d05c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.990949 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a66d36f1-0990-49b9-acbb-70adf1b0567d" (UID: "a66d36f1-0990-49b9-acbb-70adf1b0567d"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:00:50 crc kubenswrapper[4879]: I1125 16:00:50.994344 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc" (OuterVolumeSpecName: "kube-api-access-6nrwc") pod "a66d36f1-0990-49b9-acbb-70adf1b0567d" (UID: "a66d36f1-0990-49b9-acbb-70adf1b0567d"). InnerVolumeSpecName "kube-api-access-6nrwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.093330 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66d36f1-0990-49b9-acbb-70adf1b0567d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.093373 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nrwc\" (UniqueName: \"kubernetes.io/projected/a66d36f1-0990-49b9-acbb-70adf1b0567d-kube-api-access-6nrwc\") on node \"crc\" DevicePath \"\"" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.499019 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tnxpp" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.499228 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tnxpp" event={"ID":"a66d36f1-0990-49b9-acbb-70adf1b0567d","Type":"ContainerDied","Data":"c643e4f14de3e6bd5350436d7f6adbe74383ce9643fd639d26cddc2871617310"} Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.499268 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c643e4f14de3e6bd5350436d7f6adbe74383ce9643fd639d26cddc2871617310" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.501465 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-e0b3-account-create-6xh9f" event={"ID":"1a565eaa-22b7-4c01-a928-1f47d507d05c","Type":"ContainerDied","Data":"b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48"} Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.501498 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b98591266d2ad10fc861e8cb8faea303ed37395f6707c4f2d682e7cf54a59f48" Nov 25 16:00:51 crc kubenswrapper[4879]: I1125 16:00:51.501546 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-e0b3-account-create-6xh9f" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.349633 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-7dhtd"] Nov 25 16:00:53 crc kubenswrapper[4879]: E1125 16:00:53.350334 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66d36f1-0990-49b9-acbb-70adf1b0567d" containerName="mariadb-database-create" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.350348 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66d36f1-0990-49b9-acbb-70adf1b0567d" containerName="mariadb-database-create" Nov 25 16:00:53 crc kubenswrapper[4879]: E1125 16:00:53.350386 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a565eaa-22b7-4c01-a928-1f47d507d05c" containerName="mariadb-account-create" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.350393 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a565eaa-22b7-4c01-a928-1f47d507d05c" containerName="mariadb-account-create" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.350576 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a565eaa-22b7-4c01-a928-1f47d507d05c" containerName="mariadb-account-create" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.350594 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66d36f1-0990-49b9-acbb-70adf1b0567d" containerName="mariadb-database-create" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.351163 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.353614 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-cpqsg" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.354337 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.372224 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-7dhtd"] Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.435879 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.435967 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.436014 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.436069 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksklg\" (UniqueName: 
\"kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.538022 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksklg\" (UniqueName: \"kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.538167 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.538190 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.538215 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.542214 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.542436 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.543086 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.555515 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksklg\" (UniqueName: \"kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg\") pod \"glance-db-sync-7dhtd\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:53 crc kubenswrapper[4879]: I1125 16:00:53.669308 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-7dhtd" Nov 25 16:00:54 crc kubenswrapper[4879]: I1125 16:00:54.157348 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-7dhtd"] Nov 25 16:00:54 crc kubenswrapper[4879]: I1125 16:00:54.530355 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7dhtd" event={"ID":"a44596ad-b5bf-4132-b099-ed1ac235324f","Type":"ContainerStarted","Data":"2f503b1cb7550c41c54da922bd29fb526cad8dac10b1cf00f96491ebefce12c4"} Nov 25 16:00:55 crc kubenswrapper[4879]: I1125 16:00:55.541407 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7dhtd" event={"ID":"a44596ad-b5bf-4132-b099-ed1ac235324f","Type":"ContainerStarted","Data":"26cd97ce9d7ee86b17051d1bf10c7e31999b15085f6304e4c01d5174a2d5a309"} Nov 25 16:00:55 crc kubenswrapper[4879]: I1125 16:00:55.566668 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-7dhtd" podStartSLOduration=2.566643148 podStartE2EDuration="2.566643148s" podCreationTimestamp="2025-11-25 16:00:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:00:55.560179033 +0000 UTC m=+5747.163592114" watchObservedRunningTime="2025-11-25 16:00:55.566643148 +0000 UTC m=+5747.170056219" Nov 25 16:00:58 crc kubenswrapper[4879]: I1125 16:00:58.570248 4879 generic.go:334] "Generic (PLEG): container finished" podID="a44596ad-b5bf-4132-b099-ed1ac235324f" containerID="26cd97ce9d7ee86b17051d1bf10c7e31999b15085f6304e4c01d5174a2d5a309" exitCode=0 Nov 25 16:00:58 crc kubenswrapper[4879]: I1125 16:00:58.570355 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7dhtd" event={"ID":"a44596ad-b5bf-4132-b099-ed1ac235324f","Type":"ContainerDied","Data":"26cd97ce9d7ee86b17051d1bf10c7e31999b15085f6304e4c01d5174a2d5a309"} Nov 25 16:00:58 crc kubenswrapper[4879]: E1125 16:00:58.844560 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:00:59 crc kubenswrapper[4879]: I1125 16:00:59.967966 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-7dhtd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.062922 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle\") pod \"a44596ad-b5bf-4132-b099-ed1ac235324f\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.063355 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data\") pod \"a44596ad-b5bf-4132-b099-ed1ac235324f\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.063394 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksklg\" (UniqueName: \"kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg\") pod \"a44596ad-b5bf-4132-b099-ed1ac235324f\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.063507 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data\") pod \"a44596ad-b5bf-4132-b099-ed1ac235324f\" (UID: \"a44596ad-b5bf-4132-b099-ed1ac235324f\") " Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.069740 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg" (OuterVolumeSpecName: "kube-api-access-ksklg") pod "a44596ad-b5bf-4132-b099-ed1ac235324f" (UID: "a44596ad-b5bf-4132-b099-ed1ac235324f"). InnerVolumeSpecName "kube-api-access-ksklg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.069952 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a44596ad-b5bf-4132-b099-ed1ac235324f" (UID: "a44596ad-b5bf-4132-b099-ed1ac235324f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.091317 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a44596ad-b5bf-4132-b099-ed1ac235324f" (UID: "a44596ad-b5bf-4132-b099-ed1ac235324f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.110697 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data" (OuterVolumeSpecName: "config-data") pod "a44596ad-b5bf-4132-b099-ed1ac235324f" (UID: "a44596ad-b5bf-4132-b099-ed1ac235324f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.134198 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29401441-zjmkd"] Nov 25 16:01:00 crc kubenswrapper[4879]: E1125 16:01:00.134647 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44596ad-b5bf-4132-b099-ed1ac235324f" containerName="glance-db-sync" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.134670 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44596ad-b5bf-4132-b099-ed1ac235324f" containerName="glance-db-sync" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.134914 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44596ad-b5bf-4132-b099-ed1ac235324f" containerName="glance-db-sync" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.137676 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.145295 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29401441-zjmkd"] Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.165831 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.165916 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166007 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166105 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcmxr\" (UniqueName: \"kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166189 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166207 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166243 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksklg\" (UniqueName: 
\"kubernetes.io/projected/a44596ad-b5bf-4132-b099-ed1ac235324f-kube-api-access-ksklg\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.166257 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a44596ad-b5bf-4132-b099-ed1ac235324f-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.267656 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.267760 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcmxr\" (UniqueName: \"kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.267853 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.267926 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.271405 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.271612 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.272562 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.285845 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcmxr\" (UniqueName: \"kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr\") pod \"keystone-cron-29401441-zjmkd\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.505431 4879 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.593200 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-7dhtd" event={"ID":"a44596ad-b5bf-4132-b099-ed1ac235324f","Type":"ContainerDied","Data":"2f503b1cb7550c41c54da922bd29fb526cad8dac10b1cf00f96491ebefce12c4"} Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.593520 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f503b1cb7550c41c54da922bd29fb526cad8dac10b1cf00f96491ebefce12c4" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.593593 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-7dhtd" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.856691 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.858914 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.869330 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.876329 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-cpqsg" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.876541 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.876618 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877020 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877112 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877230 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877262 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877294 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.877382 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fzgd\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.884704 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.968076 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29401441-zjmkd"] Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982246 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982309 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982426 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982458 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982491 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.982517 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fzgd\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.984030 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.984324 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.994648 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.994767 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:00 crc kubenswrapper[4879]: I1125 16:01:00.999204 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.001543 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.015857 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fzgd\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd\") pod \"glance-default-external-api-0\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.041048 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.043153 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.062355 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.086900 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.086965 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.087036 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.087065 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.087082 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxbr7\" (UniqueName: \"kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.114692 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.116444 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.119226 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.129599 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188473 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8wsh\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188542 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188568 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxbr7\" (UniqueName: \"kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188591 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188679 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188712 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188744 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188775 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " 
pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188824 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188857 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188881 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.188952 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.190153 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.190863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.190955 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.191470 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.216943 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxbr7\" (UniqueName: \"kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7\") pod \"dnsmasq-dns-6c6c7b5c97-89xwp\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc 
kubenswrapper[4879]: I1125 16:01:01.248812 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290685 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290736 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290765 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290837 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290883 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.290912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8wsh\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.291981 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.292016 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " 
pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.296257 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.297055 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.297080 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.298507 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.310579 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8wsh\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh\") pod \"glance-default-internal-api-0\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.500943 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.509671 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.648358 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401441-zjmkd" event={"ID":"1c9d5fb6-90b7-44ee-a019-df27b5c22eef","Type":"ContainerStarted","Data":"39672c824d7c23fb830a3bc32f163889dc2c39a98b69c5fc54ad124127d6dac3"} Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.648418 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401441-zjmkd" event={"ID":"1c9d5fb6-90b7-44ee-a019-df27b5c22eef","Type":"ContainerStarted","Data":"a76d00e7e0ab24409bf1a349d819707708193997371d11f6467f1233e25252d1"} Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.689752 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29401441-zjmkd" podStartSLOduration=1.689727919 podStartE2EDuration="1.689727919s" podCreationTimestamp="2025-11-25 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:01.688583368 +0000 UTC m=+5753.291996449" watchObservedRunningTime="2025-11-25 16:01:01.689727919 +0000 UTC m=+5753.293141020" Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.841678 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:01 crc kubenswrapper[4879]: I1125 16:01:01.992137 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.195473 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.239207 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.661751 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerStarted","Data":"77c47ea18180d9bc3795d2d4e87e40357a271503271ad4761edcb7c0935336af"} Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.664364 4879 generic.go:334] "Generic (PLEG): container finished" podID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerID="0309c0ad8552798355a722b2d9835bb9585aa73c6678510e08e2972d2022ba17" exitCode=0 Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.664408 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" event={"ID":"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4","Type":"ContainerDied","Data":"0309c0ad8552798355a722b2d9835bb9585aa73c6678510e08e2972d2022ba17"} Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.664438 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" event={"ID":"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4","Type":"ContainerStarted","Data":"333e4580213d40c293826f7b3f55c8b53f0eed179b1f1f8dac3f1229ea87e35f"} Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.668073 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerStarted","Data":"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126"} Nov 25 16:01:02 crc kubenswrapper[4879]: I1125 16:01:02.668144 4879 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerStarted","Data":"0ef78b1004ef6b0b191a04a33531697d0d6add9b0ab590f25e40c4578cf63673"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.679108 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" event={"ID":"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4","Type":"ContainerStarted","Data":"4a5af38f4fd63a331c2146c8889a6d2a137fda52def4f2148d6193ff143256fe"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.679735 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.681980 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerStarted","Data":"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.682184 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-log" containerID="cri-o://c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" gracePeriod=30 Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.682440 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-httpd" containerID="cri-o://46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" gracePeriod=30 Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.687530 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerStarted","Data":"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.687592 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerStarted","Data":"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.691017 4879 generic.go:334] "Generic (PLEG): container finished" podID="1c9d5fb6-90b7-44ee-a019-df27b5c22eef" containerID="39672c824d7c23fb830a3bc32f163889dc2c39a98b69c5fc54ad124127d6dac3" exitCode=0 Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.691066 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401441-zjmkd" event={"ID":"1c9d5fb6-90b7-44ee-a019-df27b5c22eef","Type":"ContainerDied","Data":"39672c824d7c23fb830a3bc32f163889dc2c39a98b69c5fc54ad124127d6dac3"} Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.703266 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" podStartSLOduration=2.703244132 podStartE2EDuration="2.703244132s" podCreationTimestamp="2025-11-25 16:01:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:03.702314878 +0000 UTC m=+5755.305727969" watchObservedRunningTime="2025-11-25 16:01:03.703244132 +0000 UTC m=+5755.306657233" Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 
16:01:03.726232 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.726210371 podStartE2EDuration="3.726210371s" podCreationTimestamp="2025-11-25 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:03.721483543 +0000 UTC m=+5755.324896614" watchObservedRunningTime="2025-11-25 16:01:03.726210371 +0000 UTC m=+5755.329623442" Nov 25 16:01:03 crc kubenswrapper[4879]: I1125 16:01:03.755792 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.7557650259999997 podStartE2EDuration="2.755765026s" podCreationTimestamp="2025-11-25 16:01:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:03.748806819 +0000 UTC m=+5755.352219890" watchObservedRunningTime="2025-11-25 16:01:03.755765026 +0000 UTC m=+5755.359178097" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.131535 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.363793 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.457572 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fzgd\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.457780 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.457809 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458085 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458112 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs" (OuterVolumeSpecName: "logs") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458168 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458210 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458667 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.458706 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts\") pod \"7524008d-2314-4f4e-a984-d64e5e0fedd6\" (UID: \"7524008d-2314-4f4e-a984-d64e5e0fedd6\") " Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.459179 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.459197 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7524008d-2314-4f4e-a984-d64e5e0fedd6-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.464018 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph" (OuterVolumeSpecName: "ceph") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.464224 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts" (OuterVolumeSpecName: "scripts") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.465302 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd" (OuterVolumeSpecName: "kube-api-access-4fzgd") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "kube-api-access-4fzgd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.485486 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.507495 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data" (OuterVolumeSpecName: "config-data") pod "7524008d-2314-4f4e-a984-d64e5e0fedd6" (UID: "7524008d-2314-4f4e-a984-d64e5e0fedd6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.560807 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.560843 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.560852 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.560863 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7524008d-2314-4f4e-a984-d64e5e0fedd6-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.560872 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fzgd\" (UniqueName: \"kubernetes.io/projected/7524008d-2314-4f4e-a984-d64e5e0fedd6-kube-api-access-4fzgd\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.700633 4879 generic.go:334] "Generic (PLEG): container finished" podID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerID="46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" exitCode=0 Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.700666 4879 generic.go:334] "Generic (PLEG): container finished" podID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerID="c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" exitCode=143 Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.700683 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.701710 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerDied","Data":"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0"} Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.701830 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerDied","Data":"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126"} Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.701905 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7524008d-2314-4f4e-a984-d64e5e0fedd6","Type":"ContainerDied","Data":"0ef78b1004ef6b0b191a04a33531697d0d6add9b0ab590f25e40c4578cf63673"} Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.701858 4879 scope.go:117] "RemoveContainer" containerID="46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.733599 4879 scope.go:117] "RemoveContainer" containerID="c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.739715 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.774226 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.784538 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:04 crc kubenswrapper[4879]: E1125 16:01:04.784974 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-log" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.784997 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-log" Nov 25 16:01:04 crc kubenswrapper[4879]: E1125 16:01:04.785043 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-httpd" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.785050 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-httpd" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.785226 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-log" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.785242 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" containerName="glance-httpd" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.786343 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.789153 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.792869 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.803980 4879 scope.go:117] "RemoveContainer" containerID="46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" Nov 25 16:01:04 crc kubenswrapper[4879]: E1125 16:01:04.810978 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0\": container with ID starting with 46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0 not found: ID does not exist" containerID="46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.811033 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0"} err="failed to get container status \"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0\": rpc error: code = NotFound desc = could not find container \"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0\": container with ID starting with 46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0 not found: ID does not exist" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.811069 4879 scope.go:117] "RemoveContainer" containerID="c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" Nov 25 16:01:04 crc kubenswrapper[4879]: E1125 16:01:04.812567 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126\": container with ID starting with c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126 not found: ID does not exist" containerID="c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.812632 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126"} err="failed to get container status \"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126\": rpc error: code = NotFound desc = could not find container \"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126\": container with ID starting with c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126 not found: ID does not exist" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.812667 4879 scope.go:117] "RemoveContainer" containerID="46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.813023 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0"} err="failed to get container status \"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0\": rpc error: code = NotFound desc = could not find container \"46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0\": container with ID 
starting with 46bf5a0a9ba191152e10e848c20ee0f07801206595a74470d130e4b86faaeec0 not found: ID does not exist" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.813042 4879 scope.go:117] "RemoveContainer" containerID="c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.813325 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126"} err="failed to get container status \"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126\": rpc error: code = NotFound desc = could not find container \"c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126\": container with ID starting with c9e6b37812e26c489376c595cd55da66d32fea60610b992acd92e18a7a0d0126 not found: ID does not exist" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.867447 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw4jm\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.867509 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.867594 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.867634 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.867727 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.868612 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.868669 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.970900 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.970947 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971025 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw4jm\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971054 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971094 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971114 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971182 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971694 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.971879 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.976099 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.976148 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.978485 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.978806 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:04 crc kubenswrapper[4879]: I1125 16:01:04.997655 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw4jm\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm\") pod \"glance-default-external-api-0\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " pod="openstack/glance-default-external-api-0" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.075696 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.120532 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.174203 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys\") pod \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.174608 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data\") pod \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.174675 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle\") pod \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.174705 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcmxr\" (UniqueName: \"kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr\") pod \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\" (UID: \"1c9d5fb6-90b7-44ee-a019-df27b5c22eef\") " Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.179280 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1c9d5fb6-90b7-44ee-a019-df27b5c22eef" (UID: "1c9d5fb6-90b7-44ee-a019-df27b5c22eef"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.179402 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr" (OuterVolumeSpecName: "kube-api-access-rcmxr") pod "1c9d5fb6-90b7-44ee-a019-df27b5c22eef" (UID: "1c9d5fb6-90b7-44ee-a019-df27b5c22eef"). InnerVolumeSpecName "kube-api-access-rcmxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.209410 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c9d5fb6-90b7-44ee-a019-df27b5c22eef" (UID: "1c9d5fb6-90b7-44ee-a019-df27b5c22eef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.222009 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data" (OuterVolumeSpecName: "config-data") pod "1c9d5fb6-90b7-44ee-a019-df27b5c22eef" (UID: "1c9d5fb6-90b7-44ee-a019-df27b5c22eef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.276872 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.276916 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.276935 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.276950 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcmxr\" (UniqueName: \"kubernetes.io/projected/1c9d5fb6-90b7-44ee-a019-df27b5c22eef-kube-api-access-rcmxr\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.636985 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:01:05 crc kubenswrapper[4879]: W1125 16:01:05.639964 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9afebe2_9a18_46a4_b728_184544602da1.slice/crio-b3ee5ac96f9be918daa7e2891e30d0d0975a11793187423fa2187eac17dcc535 WatchSource:0}: Error finding container b3ee5ac96f9be918daa7e2891e30d0d0975a11793187423fa2187eac17dcc535: Status 404 returned error can't find the container with id b3ee5ac96f9be918daa7e2891e30d0d0975a11793187423fa2187eac17dcc535 Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.665362 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7524008d-2314-4f4e-a984-d64e5e0fedd6" path="/var/lib/kubelet/pods/7524008d-2314-4f4e-a984-d64e5e0fedd6/volumes" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.710590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerStarted","Data":"b3ee5ac96f9be918daa7e2891e30d0d0975a11793187423fa2187eac17dcc535"} Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.714350 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29401441-zjmkd" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.714378 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401441-zjmkd" event={"ID":"1c9d5fb6-90b7-44ee-a019-df27b5c22eef","Type":"ContainerDied","Data":"a76d00e7e0ab24409bf1a349d819707708193997371d11f6467f1233e25252d1"} Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.714427 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a76d00e7e0ab24409bf1a349d819707708193997371d11f6467f1233e25252d1" Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.714463 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-log" containerID="cri-o://6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" gracePeriod=30 Nov 25 16:01:05 crc kubenswrapper[4879]: I1125 16:01:05.714600 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-httpd" containerID="cri-o://9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" gracePeriod=30 Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.303443 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402042 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402341 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8wsh\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402434 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402469 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402510 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402572 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: 
\"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.402602 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph\") pod \"321d2e4f-c5a1-4566-b608-ad5b58de92de\" (UID: \"321d2e4f-c5a1-4566-b608-ad5b58de92de\") " Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.403046 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.403260 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs" (OuterVolumeSpecName: "logs") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.407078 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph" (OuterVolumeSpecName: "ceph") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.407680 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh" (OuterVolumeSpecName: "kube-api-access-q8wsh") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "kube-api-access-q8wsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.408113 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts" (OuterVolumeSpecName: "scripts") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.427043 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.451816 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data" (OuterVolumeSpecName: "config-data") pod "321d2e4f-c5a1-4566-b608-ad5b58de92de" (UID: "321d2e4f-c5a1-4566-b608-ad5b58de92de"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505228 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505260 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505272 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505280 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8wsh\" (UniqueName: \"kubernetes.io/projected/321d2e4f-c5a1-4566-b608-ad5b58de92de-kube-api-access-q8wsh\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505289 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505297 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321d2e4f-c5a1-4566-b608-ad5b58de92de-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.505304 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/321d2e4f-c5a1-4566-b608-ad5b58de92de-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.727246 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerStarted","Data":"ea4560eee4b0c9f634d283236e61f653c8ce93b64b01edc7f6c3fc9ee0715dc0"} Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.727298 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerStarted","Data":"919c6dac1862c4e9c307b3e34b91ae3c16a3d0f3406d3f2a72b15f798112cbe7"} Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731330 4879 generic.go:334] "Generic (PLEG): container finished" podID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerID="9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" exitCode=0 Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731368 4879 generic.go:334] "Generic (PLEG): container finished" podID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerID="6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" exitCode=143 Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731398 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerDied","Data":"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f"} Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731432 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerDied","Data":"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff"} Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731447 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"321d2e4f-c5a1-4566-b608-ad5b58de92de","Type":"ContainerDied","Data":"77c47ea18180d9bc3795d2d4e87e40357a271503271ad4761edcb7c0935336af"} Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731464 4879 scope.go:117] "RemoveContainer" containerID="9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.731591 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.763802 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.763778211 podStartE2EDuration="2.763778211s" podCreationTimestamp="2025-11-25 16:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:06.75108686 +0000 UTC m=+5758.354499931" watchObservedRunningTime="2025-11-25 16:01:06.763778211 +0000 UTC m=+5758.367191282" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.772450 4879 scope.go:117] "RemoveContainer" containerID="6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.778826 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.786202 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.796512 4879 scope.go:117] "RemoveContainer" containerID="9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" Nov 25 16:01:06 crc kubenswrapper[4879]: E1125 16:01:06.799687 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f\": container with ID starting with 9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f not found: ID does not exist" containerID="9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.799745 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f"} err="failed to get container status \"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f\": rpc error: code = NotFound desc = could not find container \"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f\": container with ID starting with 9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f not found: ID does not exist" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.799783 4879 scope.go:117] "RemoveContainer" containerID="6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" Nov 25 16:01:06 crc kubenswrapper[4879]: E1125 16:01:06.800306 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff\": container with ID starting with 6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff not found: ID does not exist" containerID="6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.800359 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff"} err="failed to get container status \"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff\": rpc error: code = NotFound desc = could not find container \"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff\": container with ID starting with 6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff not found: ID does not exist" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.800396 4879 scope.go:117] "RemoveContainer" containerID="9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.800746 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f"} err="failed to get container status \"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f\": rpc error: code = NotFound desc = could not find container \"9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f\": container with ID starting with 9f0ac76da0797e78c9339ee5d973318986420d1187207b43030f3d560803b79f not found: ID does not exist" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.800778 4879 scope.go:117] "RemoveContainer" containerID="6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.801168 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff"} err="failed to get container status \"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff\": rpc error: code = NotFound desc = could not find container \"6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff\": container with ID starting with 6b8734324e624b2762962d686a0ee70b9170ddba466f1c602316b61bd713efff not found: ID does not exist" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805065 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:06 crc kubenswrapper[4879]: E1125 16:01:06.805570 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-httpd" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805589 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-httpd" Nov 25 16:01:06 crc kubenswrapper[4879]: E1125 16:01:06.805630 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c9d5fb6-90b7-44ee-a019-df27b5c22eef" containerName="keystone-cron" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805638 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c9d5fb6-90b7-44ee-a019-df27b5c22eef" containerName="keystone-cron" Nov 25 16:01:06 crc kubenswrapper[4879]: E1125 16:01:06.805651 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" 
containerName="glance-log" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805659 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-log" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805814 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c9d5fb6-90b7-44ee-a019-df27b5c22eef" containerName="keystone-cron" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805834 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-httpd" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.805847 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" containerName="glance-log" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.807022 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.812017 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.820228 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910695 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910748 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh6rd\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910831 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910929 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910972 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:06 crc kubenswrapper[4879]: I1125 16:01:06.910989 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012740 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012796 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012816 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012864 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012890 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh6rd\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012911 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.012936 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.013328 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.013477 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.016959 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.017280 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.017319 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.020052 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.029726 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh6rd\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd\") pod \"glance-default-internal-api-0\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.126352 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.659110 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="321d2e4f-c5a1-4566-b608-ad5b58de92de" path="/var/lib/kubelet/pods/321d2e4f-c5a1-4566-b608-ad5b58de92de/volumes" Nov 25 16:01:07 crc kubenswrapper[4879]: W1125 16:01:07.659140 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbadfca4f_63b6_4ef4_bb69_da455c095844.slice/crio-00b96846b3f5b84152c8ab3218da623e81c3171016458e81e6571286bd3724c9 WatchSource:0}: Error finding container 00b96846b3f5b84152c8ab3218da623e81c3171016458e81e6571286bd3724c9: Status 404 returned error can't find the container with id 00b96846b3f5b84152c8ab3218da623e81c3171016458e81e6571286bd3724c9 Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.659851 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:01:07 crc kubenswrapper[4879]: I1125 16:01:07.746619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerStarted","Data":"00b96846b3f5b84152c8ab3218da623e81c3171016458e81e6571286bd3724c9"} Nov 25 16:01:08 crc kubenswrapper[4879]: I1125 16:01:08.767438 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerStarted","Data":"740c04b6aa3bfd660d7471ea86a023f80988c86dc3a4bb2ab4d30e22cdeae3bf"} Nov 25 16:01:08 crc kubenswrapper[4879]: I1125 16:01:08.767954 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerStarted","Data":"287dad8cc8bab35eda9435306e6a545520e62cbc9252b920d47fa2a903b4171e"} Nov 25 16:01:08 crc kubenswrapper[4879]: I1125 16:01:08.798502 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.798479754 podStartE2EDuration="2.798479754s" podCreationTimestamp="2025-11-25 16:01:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:08.791573758 +0000 UTC m=+5760.394986829" watchObservedRunningTime="2025-11-25 16:01:08.798479754 +0000 UTC m=+5760.401892825" Nov 25 16:01:09 crc kubenswrapper[4879]: E1125 16:01:09.058312 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64721e41_24b7_4492_825b_f25a899324f2.slice\": RecentStats: unable to find data in memory cache]" Nov 25 16:01:11 crc kubenswrapper[4879]: I1125 16:01:11.502304 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:11 crc kubenswrapper[4879]: I1125 16:01:11.566568 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:01:11 crc kubenswrapper[4879]: I1125 16:01:11.567004 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="dnsmasq-dns" 
containerID="cri-o://9bdb997481673af4eeb5e3b79066f6fa8e46f62d87f822558bc8c0e760b00b62" gracePeriod=10 Nov 25 16:01:11 crc kubenswrapper[4879]: I1125 16:01:11.797302 4879 generic.go:334] "Generic (PLEG): container finished" podID="4eca7f50-07c5-49a2-b120-9643e129234b" containerID="9bdb997481673af4eeb5e3b79066f6fa8e46f62d87f822558bc8c0e760b00b62" exitCode=0 Nov 25 16:01:11 crc kubenswrapper[4879]: I1125 16:01:11.797440 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" event={"ID":"4eca7f50-07c5-49a2-b120-9643e129234b","Type":"ContainerDied","Data":"9bdb997481673af4eeb5e3b79066f6fa8e46f62d87f822558bc8c0e760b00b62"} Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.036930 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.114340 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb\") pod \"4eca7f50-07c5-49a2-b120-9643e129234b\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.115256 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc\") pod \"4eca7f50-07c5-49a2-b120-9643e129234b\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.115430 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svcs5\" (UniqueName: \"kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5\") pod \"4eca7f50-07c5-49a2-b120-9643e129234b\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.115485 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config\") pod \"4eca7f50-07c5-49a2-b120-9643e129234b\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.115538 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb\") pod \"4eca7f50-07c5-49a2-b120-9643e129234b\" (UID: \"4eca7f50-07c5-49a2-b120-9643e129234b\") " Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.141811 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5" (OuterVolumeSpecName: "kube-api-access-svcs5") pod "4eca7f50-07c5-49a2-b120-9643e129234b" (UID: "4eca7f50-07c5-49a2-b120-9643e129234b"). InnerVolumeSpecName "kube-api-access-svcs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.168292 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config" (OuterVolumeSpecName: "config") pod "4eca7f50-07c5-49a2-b120-9643e129234b" (UID: "4eca7f50-07c5-49a2-b120-9643e129234b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.175695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4eca7f50-07c5-49a2-b120-9643e129234b" (UID: "4eca7f50-07c5-49a2-b120-9643e129234b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.178385 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4eca7f50-07c5-49a2-b120-9643e129234b" (UID: "4eca7f50-07c5-49a2-b120-9643e129234b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.187883 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4eca7f50-07c5-49a2-b120-9643e129234b" (UID: "4eca7f50-07c5-49a2-b120-9643e129234b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.218067 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.218096 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.218107 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svcs5\" (UniqueName: \"kubernetes.io/projected/4eca7f50-07c5-49a2-b120-9643e129234b-kube-api-access-svcs5\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.218132 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.218140 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4eca7f50-07c5-49a2-b120-9643e129234b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.809600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" event={"ID":"4eca7f50-07c5-49a2-b120-9643e129234b","Type":"ContainerDied","Data":"98d1a7cf9513658eaf8f80dc5dc782e6256d3a7b4b26800cae194e9e70dcfb18"} Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.809664 4879 scope.go:117] "RemoveContainer" containerID="9bdb997481673af4eeb5e3b79066f6fa8e46f62d87f822558bc8c0e760b00b62" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.809708 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b55c79499-8f2s5" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.838320 4879 scope.go:117] "RemoveContainer" containerID="2b5b729238be0f38a8a0678d844458c0c97569f89e7e50b87d80e2173326a475" Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.841485 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:01:12 crc kubenswrapper[4879]: I1125 16:01:12.852416 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b55c79499-8f2s5"] Nov 25 16:01:13 crc kubenswrapper[4879]: I1125 16:01:13.655514 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" path="/var/lib/kubelet/pods/4eca7f50-07c5-49a2-b120-9643e129234b/volumes" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.121383 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.121429 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.150556 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.159185 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.877194 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 16:01:15 crc kubenswrapper[4879]: I1125 16:01:15.877242 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.127271 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.128682 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.161993 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.167488 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.408563 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.408619 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.408663 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.409434 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.409494 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" gracePeriod=600 Nov 25 16:01:17 crc kubenswrapper[4879]: E1125 16:01:17.534064 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.854354 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.856993 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.896181 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" exitCode=0 Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.896230 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f"} Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.896315 4879 scope.go:117] "RemoveContainer" containerID="1dd6ec0d3ee4f3a94e99f78eba8962061f508166d482ac0ddcf87f6b6222597a" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.897051 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.897176 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:17 crc kubenswrapper[4879]: I1125 16:01:17.897380 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:01:17 crc kubenswrapper[4879]: E1125 16:01:17.897687 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 
25 16:01:19 crc kubenswrapper[4879]: I1125 16:01:19.945085 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:19 crc kubenswrapper[4879]: I1125 16:01:19.945494 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 16:01:20 crc kubenswrapper[4879]: I1125 16:01:20.082520 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.802259 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tskj4"] Nov 25 16:01:25 crc kubenswrapper[4879]: E1125 16:01:25.803241 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="dnsmasq-dns" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.803261 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="dnsmasq-dns" Nov 25 16:01:25 crc kubenswrapper[4879]: E1125 16:01:25.803429 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="init" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.803442 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="init" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.803767 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eca7f50-07c5-49a2-b120-9643e129234b" containerName="dnsmasq-dns" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.804691 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tskj4" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.820063 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tskj4"] Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.903221 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e580-account-create-t2scz"] Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.904328 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.906939 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.911970 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e580-account-create-t2scz"] Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.944014 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts\") pod \"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:25 crc kubenswrapper[4879]: I1125 16:01:25.944240 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jthc\" (UniqueName: \"kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc\") pod \"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.045867 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jthc\" (UniqueName: \"kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc\") pod \"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.045983 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.046068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwbl5\" (UniqueName: \"kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.046226 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts\") pod \"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.047584 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts\") pod \"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.077471 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jthc\" (UniqueName: \"kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc\") pod 
\"placement-db-create-tskj4\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.125861 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tskj4" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.147705 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwbl5\" (UniqueName: \"kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.147867 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.148717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.187567 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwbl5\" (UniqueName: \"kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5\") pod \"placement-e580-account-create-t2scz\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.227588 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.684358 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tskj4"] Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.763596 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e580-account-create-t2scz"] Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.977595 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e580-account-create-t2scz" event={"ID":"1189794a-0f7c-44fa-8741-1f6c95c642c3","Type":"ContainerStarted","Data":"4f59867611e79372bf1b946a40bacea60d1c8317da436acf349170856377503f"} Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.978762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e580-account-create-t2scz" event={"ID":"1189794a-0f7c-44fa-8741-1f6c95c642c3","Type":"ContainerStarted","Data":"38fc21ec35aa22dff8d8f81e46260e7f9e788b84a3d7fcb0fa27be04ff402b10"} Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.981595 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tskj4" event={"ID":"2a71a42f-4b21-49d2-b873-c95a576ddcab","Type":"ContainerStarted","Data":"d7d351252d44d94cbbe1d53ffe2a880f4c549c1f87a300e3870866a6739a2e12"} Nov 25 16:01:26 crc kubenswrapper[4879]: I1125 16:01:26.981908 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tskj4" event={"ID":"2a71a42f-4b21-49d2-b873-c95a576ddcab","Type":"ContainerStarted","Data":"7d0e5fe9a27e8f9188f66b6835d6e032e1afcd63e1ed587aa12dcddc49ef0070"} Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.003544 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-e580-account-create-t2scz" podStartSLOduration=2.003523858 podStartE2EDuration="2.003523858s" podCreationTimestamp="2025-11-25 16:01:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:26.991907155 +0000 UTC m=+5778.595320226" watchObservedRunningTime="2025-11-25 16:01:27.003523858 +0000 UTC m=+5778.606936929" Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.013780 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-tskj4" podStartSLOduration=2.013759753 podStartE2EDuration="2.013759753s" podCreationTimestamp="2025-11-25 16:01:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:27.005215183 +0000 UTC m=+5778.608628264" watchObservedRunningTime="2025-11-25 16:01:27.013759753 +0000 UTC m=+5778.617172824" Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.990081 4879 generic.go:334] "Generic (PLEG): container finished" podID="2a71a42f-4b21-49d2-b873-c95a576ddcab" containerID="d7d351252d44d94cbbe1d53ffe2a880f4c549c1f87a300e3870866a6739a2e12" exitCode=0 Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.990150 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tskj4" event={"ID":"2a71a42f-4b21-49d2-b873-c95a576ddcab","Type":"ContainerDied","Data":"d7d351252d44d94cbbe1d53ffe2a880f4c549c1f87a300e3870866a6739a2e12"} Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.992890 4879 generic.go:334] "Generic (PLEG): container finished" podID="1189794a-0f7c-44fa-8741-1f6c95c642c3" 
containerID="4f59867611e79372bf1b946a40bacea60d1c8317da436acf349170856377503f" exitCode=0 Nov 25 16:01:27 crc kubenswrapper[4879]: I1125 16:01:27.992933 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e580-account-create-t2scz" event={"ID":"1189794a-0f7c-44fa-8741-1f6c95c642c3","Type":"ContainerDied","Data":"4f59867611e79372bf1b946a40bacea60d1c8317da436acf349170856377503f"} Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.411638 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.421810 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tskj4" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.608433 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts\") pod \"1189794a-0f7c-44fa-8741-1f6c95c642c3\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.608900 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwbl5\" (UniqueName: \"kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5\") pod \"1189794a-0f7c-44fa-8741-1f6c95c642c3\" (UID: \"1189794a-0f7c-44fa-8741-1f6c95c642c3\") " Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.608965 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jthc\" (UniqueName: \"kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc\") pod \"2a71a42f-4b21-49d2-b873-c95a576ddcab\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.609031 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts\") pod \"2a71a42f-4b21-49d2-b873-c95a576ddcab\" (UID: \"2a71a42f-4b21-49d2-b873-c95a576ddcab\") " Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.609711 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1189794a-0f7c-44fa-8741-1f6c95c642c3" (UID: "1189794a-0f7c-44fa-8741-1f6c95c642c3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.609737 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a71a42f-4b21-49d2-b873-c95a576ddcab" (UID: "2a71a42f-4b21-49d2-b873-c95a576ddcab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.614285 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc" (OuterVolumeSpecName: "kube-api-access-9jthc") pod "2a71a42f-4b21-49d2-b873-c95a576ddcab" (UID: "2a71a42f-4b21-49d2-b873-c95a576ddcab"). InnerVolumeSpecName "kube-api-access-9jthc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.614679 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5" (OuterVolumeSpecName: "kube-api-access-kwbl5") pod "1189794a-0f7c-44fa-8741-1f6c95c642c3" (UID: "1189794a-0f7c-44fa-8741-1f6c95c642c3"). InnerVolumeSpecName "kube-api-access-kwbl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.710805 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwbl5\" (UniqueName: \"kubernetes.io/projected/1189794a-0f7c-44fa-8741-1f6c95c642c3-kube-api-access-kwbl5\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.710834 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jthc\" (UniqueName: \"kubernetes.io/projected/2a71a42f-4b21-49d2-b873-c95a576ddcab-kube-api-access-9jthc\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.710844 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a71a42f-4b21-49d2-b873-c95a576ddcab-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:29 crc kubenswrapper[4879]: I1125 16:01:29.710854 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1189794a-0f7c-44fa-8741-1f6c95c642c3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.010873 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e580-account-create-t2scz" Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.011222 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e580-account-create-t2scz" event={"ID":"1189794a-0f7c-44fa-8741-1f6c95c642c3","Type":"ContainerDied","Data":"38fc21ec35aa22dff8d8f81e46260e7f9e788b84a3d7fcb0fa27be04ff402b10"} Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.011266 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38fc21ec35aa22dff8d8f81e46260e7f9e788b84a3d7fcb0fa27be04ff402b10" Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.013323 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tskj4" event={"ID":"2a71a42f-4b21-49d2-b873-c95a576ddcab","Type":"ContainerDied","Data":"7d0e5fe9a27e8f9188f66b6835d6e032e1afcd63e1ed587aa12dcddc49ef0070"} Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.013347 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d0e5fe9a27e8f9188f66b6835d6e032e1afcd63e1ed587aa12dcddc49ef0070" Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.013402 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-tskj4" Nov 25 16:01:30 crc kubenswrapper[4879]: I1125 16:01:30.645163 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:01:30 crc kubenswrapper[4879]: E1125 16:01:30.645610 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.318438 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:01:31 crc kubenswrapper[4879]: E1125 16:01:31.319088 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a71a42f-4b21-49d2-b873-c95a576ddcab" containerName="mariadb-database-create" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.319106 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a71a42f-4b21-49d2-b873-c95a576ddcab" containerName="mariadb-database-create" Nov 25 16:01:31 crc kubenswrapper[4879]: E1125 16:01:31.319196 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1189794a-0f7c-44fa-8741-1f6c95c642c3" containerName="mariadb-account-create" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.319207 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1189794a-0f7c-44fa-8741-1f6c95c642c3" containerName="mariadb-account-create" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.319379 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a71a42f-4b21-49d2-b873-c95a576ddcab" containerName="mariadb-database-create" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.319401 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1189794a-0f7c-44fa-8741-1f6c95c642c3" containerName="mariadb-account-create" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.320573 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.334774 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.334937 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.335305 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.335383 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.335441 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.335494 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7m5j\" (UniqueName: \"kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.372925 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zw6jh"] Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.375218 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.377972 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.378237 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qj9b9" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.378500 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.393905 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zw6jh"] Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.436994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437047 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437231 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437373 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437467 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437498 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6k2b\" (UniqueName: \"kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437528 4879 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-q7m5j\" (UniqueName: \"kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437558 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.437582 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.438421 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.438545 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.438745 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.438822 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.456641 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7m5j\" (UniqueName: \"kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j\") pod \"dnsmasq-dns-74f969fd95-tx5c6\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.538896 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6k2b\" (UniqueName: \"kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.538952 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.538988 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.539007 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.539043 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.539739 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.542099 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.542211 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.542597 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.557917 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6k2b\" (UniqueName: \"kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b\") pod \"placement-db-sync-zw6jh\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.636673 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:31 crc kubenswrapper[4879]: I1125 16:01:31.694528 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:32 crc kubenswrapper[4879]: I1125 16:01:32.087677 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:01:32 crc kubenswrapper[4879]: I1125 16:01:32.189427 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zw6jh"] Nov 25 16:01:32 crc kubenswrapper[4879]: W1125 16:01:32.190760 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b8f43aa_7cc1_4a2c_9e64_292a98bc5455.slice/crio-0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb WatchSource:0}: Error finding container 0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb: Status 404 returned error can't find the container with id 0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.040297 4879 generic.go:334] "Generic (PLEG): container finished" podID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerID="153fcb577e6424b89530e3f9ac004f03c782ce37e1dcf77c435e4ab144420b2d" exitCode=0 Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.040353 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" event={"ID":"9d1886cd-560a-4c14-9e9d-286cbcef59d1","Type":"ContainerDied","Data":"153fcb577e6424b89530e3f9ac004f03c782ce37e1dcf77c435e4ab144420b2d"} Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.043282 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" event={"ID":"9d1886cd-560a-4c14-9e9d-286cbcef59d1","Type":"ContainerStarted","Data":"5cf094ed7eb456db9fef6783a089d45f8d02508748a30164f473c0b12bd9f1a5"} Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.044721 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zw6jh" event={"ID":"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455","Type":"ContainerStarted","Data":"c567daa090999a7a99210876f239137acc0f6558c86a8dcabde011c379ffd274"} Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.044750 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zw6jh" event={"ID":"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455","Type":"ContainerStarted","Data":"0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb"} Nov 25 16:01:33 crc kubenswrapper[4879]: I1125 16:01:33.087465 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zw6jh" podStartSLOduration=2.087438515 podStartE2EDuration="2.087438515s" podCreationTimestamp="2025-11-25 16:01:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:33.082940774 +0000 UTC m=+5784.686353845" watchObservedRunningTime="2025-11-25 16:01:33.087438515 +0000 UTC m=+5784.690851596" Nov 25 16:01:34 crc kubenswrapper[4879]: I1125 16:01:34.054981 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" event={"ID":"9d1886cd-560a-4c14-9e9d-286cbcef59d1","Type":"ContainerStarted","Data":"d1add8973062932288eb52a3078bd836a5933523ff285e2b58257efb51f159f2"} Nov 25 16:01:34 crc kubenswrapper[4879]: I1125 16:01:34.055334 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:34 crc kubenswrapper[4879]: I1125 16:01:34.056910 
4879 generic.go:334] "Generic (PLEG): container finished" podID="7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" containerID="c567daa090999a7a99210876f239137acc0f6558c86a8dcabde011c379ffd274" exitCode=0 Nov 25 16:01:34 crc kubenswrapper[4879]: I1125 16:01:34.056984 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zw6jh" event={"ID":"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455","Type":"ContainerDied","Data":"c567daa090999a7a99210876f239137acc0f6558c86a8dcabde011c379ffd274"} Nov 25 16:01:34 crc kubenswrapper[4879]: I1125 16:01:34.080000 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" podStartSLOduration=3.079978514 podStartE2EDuration="3.079978514s" podCreationTimestamp="2025-11-25 16:01:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:34.071702521 +0000 UTC m=+5785.675115602" watchObservedRunningTime="2025-11-25 16:01:34.079978514 +0000 UTC m=+5785.683391585" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.402682 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.502785 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts\") pod \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.502865 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs\") pod \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.502937 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle\") pod \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.503491 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs" (OuterVolumeSpecName: "logs") pod "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" (UID: "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.503056 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data\") pod \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.504032 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6k2b\" (UniqueName: \"kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b\") pod \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\" (UID: \"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455\") " Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.504582 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.521768 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b" (OuterVolumeSpecName: "kube-api-access-c6k2b") pod "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" (UID: "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455"). InnerVolumeSpecName "kube-api-access-c6k2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.521924 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts" (OuterVolumeSpecName: "scripts") pod "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" (UID: "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.531482 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data" (OuterVolumeSpecName: "config-data") pod "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" (UID: "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.532025 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" (UID: "7b8f43aa-7cc1-4a2c-9e64-292a98bc5455"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.607170 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.607524 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.607539 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:35 crc kubenswrapper[4879]: I1125 16:01:35.607550 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6k2b\" (UniqueName: \"kubernetes.io/projected/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455-kube-api-access-c6k2b\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.073731 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zw6jh" event={"ID":"7b8f43aa-7cc1-4a2c-9e64-292a98bc5455","Type":"ContainerDied","Data":"0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb"} Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.074064 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0154ac23bc6d5e3586da5188f1adc420081bcf6c72d72c5fb25451caa1abf6bb" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.073789 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zw6jh" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.157059 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9cd976bcb-hcsm7"] Nov 25 16:01:36 crc kubenswrapper[4879]: E1125 16:01:36.157417 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" containerName="placement-db-sync" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.157433 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" containerName="placement-db-sync" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.157618 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" containerName="placement-db-sync" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.158532 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.160889 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qj9b9" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.161294 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.161545 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.174661 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9cd976bcb-hcsm7"] Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.317752 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-combined-ca-bundle\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.317801 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a69eda1-9308-4df4-8135-27e97e2e834a-logs\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.317996 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-config-data\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.318039 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zclj\" (UniqueName: \"kubernetes.io/projected/0a69eda1-9308-4df4-8135-27e97e2e834a-kube-api-access-7zclj\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.318095 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-scripts\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.419551 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-config-data\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.419605 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zclj\" (UniqueName: \"kubernetes.io/projected/0a69eda1-9308-4df4-8135-27e97e2e834a-kube-api-access-7zclj\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.419638 
4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-scripts\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.419725 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-combined-ca-bundle\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.419745 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a69eda1-9308-4df4-8135-27e97e2e834a-logs\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.420284 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a69eda1-9308-4df4-8135-27e97e2e834a-logs\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.424736 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-scripts\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.437082 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-config-data\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.437910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a69eda1-9308-4df4-8135-27e97e2e834a-combined-ca-bundle\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.447910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zclj\" (UniqueName: \"kubernetes.io/projected/0a69eda1-9308-4df4-8135-27e97e2e834a-kube-api-access-7zclj\") pod \"placement-9cd976bcb-hcsm7\" (UID: \"0a69eda1-9308-4df4-8135-27e97e2e834a\") " pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.479318 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:36 crc kubenswrapper[4879]: I1125 16:01:36.913463 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9cd976bcb-hcsm7"] Nov 25 16:01:36 crc kubenswrapper[4879]: W1125 16:01:36.916861 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a69eda1_9308_4df4_8135_27e97e2e834a.slice/crio-ebb993625d092ee68f74140ed1a5590beb2463d0ee52863cec6c04c9a36db295 WatchSource:0}: Error finding container ebb993625d092ee68f74140ed1a5590beb2463d0ee52863cec6c04c9a36db295: Status 404 returned error can't find the container with id ebb993625d092ee68f74140ed1a5590beb2463d0ee52863cec6c04c9a36db295 Nov 25 16:01:37 crc kubenswrapper[4879]: I1125 16:01:37.083742 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd976bcb-hcsm7" event={"ID":"0a69eda1-9308-4df4-8135-27e97e2e834a","Type":"ContainerStarted","Data":"6bccc840e89929fdce14fe19ca366c95e6eeb7bdb93a208e72930721530f7f1e"} Nov 25 16:01:37 crc kubenswrapper[4879]: I1125 16:01:37.083795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd976bcb-hcsm7" event={"ID":"0a69eda1-9308-4df4-8135-27e97e2e834a","Type":"ContainerStarted","Data":"ebb993625d092ee68f74140ed1a5590beb2463d0ee52863cec6c04c9a36db295"} Nov 25 16:01:38 crc kubenswrapper[4879]: I1125 16:01:38.094814 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9cd976bcb-hcsm7" event={"ID":"0a69eda1-9308-4df4-8135-27e97e2e834a","Type":"ContainerStarted","Data":"8005726486b7a401982ffce558cee0bf743c11de5dad5564ba1c1e5c7230b965"} Nov 25 16:01:38 crc kubenswrapper[4879]: I1125 16:01:38.095189 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:38 crc kubenswrapper[4879]: I1125 16:01:38.095213 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:01:38 crc kubenswrapper[4879]: I1125 16:01:38.128190 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-9cd976bcb-hcsm7" podStartSLOduration=2.12816672 podStartE2EDuration="2.12816672s" podCreationTimestamp="2025-11-25 16:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:01:38.120407801 +0000 UTC m=+5789.723820872" watchObservedRunningTime="2025-11-25 16:01:38.12816672 +0000 UTC m=+5789.731579791" Nov 25 16:01:41 crc kubenswrapper[4879]: I1125 16:01:41.638399 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:01:41 crc kubenswrapper[4879]: I1125 16:01:41.691869 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:41 crc kubenswrapper[4879]: I1125 16:01:41.692375 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="dnsmasq-dns" containerID="cri-o://4a5af38f4fd63a331c2146c8889a6d2a137fda52def4f2148d6193ff143256fe" gracePeriod=10 Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.130886 4879 generic.go:334] "Generic (PLEG): container finished" podID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" 
containerID="4a5af38f4fd63a331c2146c8889a6d2a137fda52def4f2148d6193ff143256fe" exitCode=0 Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.130971 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" event={"ID":"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4","Type":"ContainerDied","Data":"4a5af38f4fd63a331c2146c8889a6d2a137fda52def4f2148d6193ff143256fe"} Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.131247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" event={"ID":"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4","Type":"ContainerDied","Data":"333e4580213d40c293826f7b3f55c8b53f0eed179b1f1f8dac3f1229ea87e35f"} Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.131269 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="333e4580213d40c293826f7b3f55c8b53f0eed179b1f1f8dac3f1229ea87e35f" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.171283 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.311679 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb\") pod \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.311761 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config\") pod \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.311857 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc\") pod \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.311939 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb\") pod \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.311972 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxbr7\" (UniqueName: \"kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7\") pod \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\" (UID: \"56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4\") " Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.322228 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7" (OuterVolumeSpecName: "kube-api-access-qxbr7") pod "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" (UID: "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4"). InnerVolumeSpecName "kube-api-access-qxbr7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.369981 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" (UID: "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.373017 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" (UID: "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.373289 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config" (OuterVolumeSpecName: "config") pod "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" (UID: "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.374292 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" (UID: "56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.414465 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxbr7\" (UniqueName: \"kubernetes.io/projected/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-kube-api-access-qxbr7\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.414501 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.414514 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.414525 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:42 crc kubenswrapper[4879]: I1125 16:01:42.414536 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:01:43 crc kubenswrapper[4879]: I1125 16:01:43.139832 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6c7b5c97-89xwp" Nov 25 16:01:43 crc kubenswrapper[4879]: I1125 16:01:43.173737 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:43 crc kubenswrapper[4879]: I1125 16:01:43.182342 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6c7b5c97-89xwp"] Nov 25 16:01:43 crc kubenswrapper[4879]: I1125 16:01:43.470548 4879 scope.go:117] "RemoveContainer" containerID="ec1f6d8ab59479b1f76ce78af3a26b697d95afda0c449311d018abb8bda53052" Nov 25 16:01:43 crc kubenswrapper[4879]: I1125 16:01:43.654775 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" path="/var/lib/kubelet/pods/56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4/volumes" Nov 25 16:01:45 crc kubenswrapper[4879]: I1125 16:01:45.644317 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:01:45 crc kubenswrapper[4879]: E1125 16:01:45.644848 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:01:56 crc kubenswrapper[4879]: I1125 16:01:56.644882 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:01:56 crc kubenswrapper[4879]: E1125 16:01:56.645598 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:07 crc kubenswrapper[4879]: I1125 16:02:07.490995 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:02:07 crc kubenswrapper[4879]: I1125 16:02:07.491625 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-9cd976bcb-hcsm7" Nov 25 16:02:07 crc kubenswrapper[4879]: I1125 16:02:07.645348 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:02:07 crc kubenswrapper[4879]: E1125 16:02:07.645800 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:18 crc kubenswrapper[4879]: I1125 16:02:18.645216 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:02:18 crc kubenswrapper[4879]: E1125 16:02:18.646270 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.501813 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-r2pkk"] Nov 25 16:02:28 crc kubenswrapper[4879]: E1125 16:02:28.502706 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="dnsmasq-dns" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.502718 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="dnsmasq-dns" Nov 25 16:02:28 crc kubenswrapper[4879]: E1125 16:02:28.502748 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="init" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.502753 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="init" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.502928 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="56bb05d0-fd4b-4fe7-9c53-988f9d2ba3d4" containerName="dnsmasq-dns" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.503697 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.514553 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r2pkk"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.602288 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-gbd6q"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.603933 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.605554 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmshq\" (UniqueName: \"kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.605620 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.625164 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-ab01-account-create-7c2ms"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.626420 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.628978 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.636113 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-gbd6q"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.645892 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-ab01-account-create-7c2ms"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.701013 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-f52rd"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.702108 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.706631 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.706700 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.706826 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh45k\" (UniqueName: \"kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.706850 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmshq\" (UniqueName: \"kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.707495 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.713649 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-f52rd"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.730671 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmshq\" (UniqueName: \"kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq\") pod \"nova-api-db-create-r2pkk\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.804768 4879 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-cell0-d412-account-create-llfmj"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.807228 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808077 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808157 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh45k\" (UniqueName: \"kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808192 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr68n\" (UniqueName: \"kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808220 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808348 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.808671 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2t99\" (UniqueName: \"kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.809252 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.809633 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.823963 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.824085 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d412-account-create-llfmj"] Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.841868 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh45k\" (UniqueName: \"kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k\") pod \"nova-cell0-db-create-gbd6q\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910697 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2t99\" (UniqueName: \"kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910747 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7bfh\" (UniqueName: \"kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910779 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910821 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr68n\" (UniqueName: \"kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.910842 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.911546 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 
16:02:28.912041 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.917863 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.929237 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr68n\" (UniqueName: \"kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n\") pod \"nova-api-ab01-account-create-7c2ms\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.930244 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2t99\" (UniqueName: \"kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99\") pod \"nova-cell1-db-create-f52rd\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:28 crc kubenswrapper[4879]: I1125 16:02:28.941869 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.013409 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.013597 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7bfh\" (UniqueName: \"kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.014199 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.016338 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.030388 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-786c-account-create-vxgsn"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.031798 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.035479 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.039694 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7bfh\" (UniqueName: \"kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh\") pod \"nova-cell0-d412-account-create-llfmj\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.044554 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-786c-account-create-vxgsn"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.131987 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.218281 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4hrm\" (UniqueName: \"kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.218638 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.242704 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-gbd6q"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.312302 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r2pkk"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.321259 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4hrm\" (UniqueName: \"kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.321443 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.322377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.342104 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-g4hrm\" (UniqueName: \"kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm\") pod \"nova-cell1-786c-account-create-vxgsn\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.358051 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.519061 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-ab01-account-create-7c2ms"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.581600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gbd6q" event={"ID":"3c60211d-f049-43ab-b32d-ea63f4bc8b7d","Type":"ContainerStarted","Data":"029bd82769871d4ae90d07a4be8a60ebf046fafc94fb4a058defbe8e43e75c05"} Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.584487 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-f52rd"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.585239 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r2pkk" event={"ID":"bb7b8ca6-8b45-432a-8b46-83efaf30fe09","Type":"ContainerStarted","Data":"2e086a1a8e89676991eae53c28e0c5557b36401be2ae65801364cd90a64f31fb"} Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.587564 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ab01-account-create-7c2ms" event={"ID":"9c525edb-f49c-4350-a3df-c487163f3d31","Type":"ContainerStarted","Data":"7cecd613f2a36a7d30a6316ec5e55bf034655534c10ea79120db1c084de37a9f"} Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.638048 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d412-account-create-llfmj"] Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.666159 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:02:29 crc kubenswrapper[4879]: E1125 16:02:29.666494 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:29 crc kubenswrapper[4879]: I1125 16:02:29.814677 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-786c-account-create-vxgsn"] Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.597465 4879 generic.go:334] "Generic (PLEG): container finished" podID="19be5c82-30a2-4ec4-8d8d-a087ad293e27" containerID="78ffcf73b3b6ef721760359a710f0a610bb88545c896af29596ae1650ecce48d" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.597617 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d412-account-create-llfmj" event={"ID":"19be5c82-30a2-4ec4-8d8d-a087ad293e27","Type":"ContainerDied","Data":"78ffcf73b3b6ef721760359a710f0a610bb88545c896af29596ae1650ecce48d"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.597866 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d412-account-create-llfmj" 
event={"ID":"19be5c82-30a2-4ec4-8d8d-a087ad293e27","Type":"ContainerStarted","Data":"16c759e0fc6f9d0a29146a5fa610e96768109512e3477d05c2df9ff54ff5c8bf"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.600483 4879 generic.go:334] "Generic (PLEG): container finished" podID="bb7b8ca6-8b45-432a-8b46-83efaf30fe09" containerID="38a081af9f62eacc948fe468b2bbb17d16b7fb024be66cb344948ab42e07eae6" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.600569 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r2pkk" event={"ID":"bb7b8ca6-8b45-432a-8b46-83efaf30fe09","Type":"ContainerDied","Data":"38a081af9f62eacc948fe468b2bbb17d16b7fb024be66cb344948ab42e07eae6"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.602393 4879 generic.go:334] "Generic (PLEG): container finished" podID="4754f5d0-b42f-43e6-987d-f2b28dba3afd" containerID="79de520561295a44d129f531fcd4366e23c35c0f00cf3c5990f7f3a32364b17a" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.602439 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-786c-account-create-vxgsn" event={"ID":"4754f5d0-b42f-43e6-987d-f2b28dba3afd","Type":"ContainerDied","Data":"79de520561295a44d129f531fcd4366e23c35c0f00cf3c5990f7f3a32364b17a"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.602457 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-786c-account-create-vxgsn" event={"ID":"4754f5d0-b42f-43e6-987d-f2b28dba3afd","Type":"ContainerStarted","Data":"de2130293b589cbda4459c8006b9007b1a7276cb83e3b14b259396bdb6b38dea"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.604042 4879 generic.go:334] "Generic (PLEG): container finished" podID="a2965a20-990b-495d-bfc0-97948383e941" containerID="5c2ffe3cb4a82b2e5427b22d6a7185c3989c0c9a173486d8de04ad9fa2daab94" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.604096 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-f52rd" event={"ID":"a2965a20-990b-495d-bfc0-97948383e941","Type":"ContainerDied","Data":"5c2ffe3cb4a82b2e5427b22d6a7185c3989c0c9a173486d8de04ad9fa2daab94"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.604139 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-f52rd" event={"ID":"a2965a20-990b-495d-bfc0-97948383e941","Type":"ContainerStarted","Data":"23804565760fcc52db4690bb4de808c3ab35befea756e0d64929d679960db009"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.605660 4879 generic.go:334] "Generic (PLEG): container finished" podID="9c525edb-f49c-4350-a3df-c487163f3d31" containerID="93c78a976db9d1a92e7f6eaaaa89213fe6e60ade7190c66ec94822d3308b1444" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.605997 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ab01-account-create-7c2ms" event={"ID":"9c525edb-f49c-4350-a3df-c487163f3d31","Type":"ContainerDied","Data":"93c78a976db9d1a92e7f6eaaaa89213fe6e60ade7190c66ec94822d3308b1444"} Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.610270 4879 generic.go:334] "Generic (PLEG): container finished" podID="3c60211d-f049-43ab-b32d-ea63f4bc8b7d" containerID="cf4f3adeea56b6ed9f84d8eb510c8c2f25a4cdc54659d007c8ba24c285ec6d48" exitCode=0 Nov 25 16:02:30 crc kubenswrapper[4879]: I1125 16:02:30.610302 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gbd6q" 
event={"ID":"3c60211d-f049-43ab-b32d-ea63f4bc8b7d","Type":"ContainerDied","Data":"cf4f3adeea56b6ed9f84d8eb510c8c2f25a4cdc54659d007c8ba24c285ec6d48"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.035686 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.173923 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dh45k\" (UniqueName: \"kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k\") pod \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.174396 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts\") pod \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\" (UID: \"3c60211d-f049-43ab-b32d-ea63f4bc8b7d\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.175349 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c60211d-f049-43ab-b32d-ea63f4bc8b7d" (UID: "3c60211d-f049-43ab-b32d-ea63f4bc8b7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.180785 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k" (OuterVolumeSpecName: "kube-api-access-dh45k") pod "3c60211d-f049-43ab-b32d-ea63f4bc8b7d" (UID: "3c60211d-f049-43ab-b32d-ea63f4bc8b7d"). InnerVolumeSpecName "kube-api-access-dh45k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.273276 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.275310 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts\") pod \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.275442 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7bfh\" (UniqueName: \"kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh\") pod \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\" (UID: \"19be5c82-30a2-4ec4-8d8d-a087ad293e27\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.275748 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.275775 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dh45k\" (UniqueName: \"kubernetes.io/projected/3c60211d-f049-43ab-b32d-ea63f4bc8b7d-kube-api-access-dh45k\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.276284 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19be5c82-30a2-4ec4-8d8d-a087ad293e27" (UID: "19be5c82-30a2-4ec4-8d8d-a087ad293e27"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.279138 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh" (OuterVolumeSpecName: "kube-api-access-p7bfh") pod "19be5c82-30a2-4ec4-8d8d-a087ad293e27" (UID: "19be5c82-30a2-4ec4-8d8d-a087ad293e27"). InnerVolumeSpecName "kube-api-access-p7bfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.280516 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.288898 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.303754 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.316380 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.377613 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7bfh\" (UniqueName: \"kubernetes.io/projected/19be5c82-30a2-4ec4-8d8d-a087ad293e27-kube-api-access-p7bfh\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.377643 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19be5c82-30a2-4ec4-8d8d-a087ad293e27-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478663 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmshq\" (UniqueName: \"kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq\") pod \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478724 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts\") pod \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478759 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts\") pod \"9c525edb-f49c-4350-a3df-c487163f3d31\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478838 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4hrm\" (UniqueName: \"kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm\") pod \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\" (UID: \"4754f5d0-b42f-43e6-987d-f2b28dba3afd\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478875 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr68n\" (UniqueName: \"kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n\") pod \"9c525edb-f49c-4350-a3df-c487163f3d31\" (UID: \"9c525edb-f49c-4350-a3df-c487163f3d31\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478915 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2t99\" (UniqueName: \"kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99\") pod \"a2965a20-990b-495d-bfc0-97948383e941\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478940 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts\") pod \"a2965a20-990b-495d-bfc0-97948383e941\" (UID: \"a2965a20-990b-495d-bfc0-97948383e941\") " Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.478969 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts\") pod \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\" (UID: \"bb7b8ca6-8b45-432a-8b46-83efaf30fe09\") " Nov 25 16:02:32 crc 
kubenswrapper[4879]: I1125 16:02:32.479637 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4754f5d0-b42f-43e6-987d-f2b28dba3afd" (UID: "4754f5d0-b42f-43e6-987d-f2b28dba3afd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.479802 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bb7b8ca6-8b45-432a-8b46-83efaf30fe09" (UID: "bb7b8ca6-8b45-432a-8b46-83efaf30fe09"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.479720 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2965a20-990b-495d-bfc0-97948383e941" (UID: "a2965a20-990b-495d-bfc0-97948383e941"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.480009 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c525edb-f49c-4350-a3df-c487163f3d31" (UID: "9c525edb-f49c-4350-a3df-c487163f3d31"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.481897 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq" (OuterVolumeSpecName: "kube-api-access-cmshq") pod "bb7b8ca6-8b45-432a-8b46-83efaf30fe09" (UID: "bb7b8ca6-8b45-432a-8b46-83efaf30fe09"). InnerVolumeSpecName "kube-api-access-cmshq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.482285 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n" (OuterVolumeSpecName: "kube-api-access-pr68n") pod "9c525edb-f49c-4350-a3df-c487163f3d31" (UID: "9c525edb-f49c-4350-a3df-c487163f3d31"). InnerVolumeSpecName "kube-api-access-pr68n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.482499 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm" (OuterVolumeSpecName: "kube-api-access-g4hrm") pod "4754f5d0-b42f-43e6-987d-f2b28dba3afd" (UID: "4754f5d0-b42f-43e6-987d-f2b28dba3afd"). InnerVolumeSpecName "kube-api-access-g4hrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.482843 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99" (OuterVolumeSpecName: "kube-api-access-b2t99") pod "a2965a20-990b-495d-bfc0-97948383e941" (UID: "a2965a20-990b-495d-bfc0-97948383e941"). 
InnerVolumeSpecName "kube-api-access-b2t99". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581287 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmshq\" (UniqueName: \"kubernetes.io/projected/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-kube-api-access-cmshq\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581323 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4754f5d0-b42f-43e6-987d-f2b28dba3afd-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581333 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c525edb-f49c-4350-a3df-c487163f3d31-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581364 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4hrm\" (UniqueName: \"kubernetes.io/projected/4754f5d0-b42f-43e6-987d-f2b28dba3afd-kube-api-access-g4hrm\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581375 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr68n\" (UniqueName: \"kubernetes.io/projected/9c525edb-f49c-4350-a3df-c487163f3d31-kube-api-access-pr68n\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581384 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2t99\" (UniqueName: \"kubernetes.io/projected/a2965a20-990b-495d-bfc0-97948383e941-kube-api-access-b2t99\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581392 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2965a20-990b-495d-bfc0-97948383e941-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.581400 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bb7b8ca6-8b45-432a-8b46-83efaf30fe09-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.628629 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-786c-account-create-vxgsn" event={"ID":"4754f5d0-b42f-43e6-987d-f2b28dba3afd","Type":"ContainerDied","Data":"de2130293b589cbda4459c8006b9007b1a7276cb83e3b14b259396bdb6b38dea"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.628663 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de2130293b589cbda4459c8006b9007b1a7276cb83e3b14b259396bdb6b38dea" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.628970 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-786c-account-create-vxgsn" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.630536 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-f52rd" event={"ID":"a2965a20-990b-495d-bfc0-97948383e941","Type":"ContainerDied","Data":"23804565760fcc52db4690bb4de808c3ab35befea756e0d64929d679960db009"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.630643 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23804565760fcc52db4690bb4de808c3ab35befea756e0d64929d679960db009" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.630550 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-f52rd" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.632336 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ab01-account-create-7c2ms" event={"ID":"9c525edb-f49c-4350-a3df-c487163f3d31","Type":"ContainerDied","Data":"7cecd613f2a36a7d30a6316ec5e55bf034655534c10ea79120db1c084de37a9f"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.632373 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cecd613f2a36a7d30a6316ec5e55bf034655534c10ea79120db1c084de37a9f" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.632401 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ab01-account-create-7c2ms" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.634148 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-gbd6q" event={"ID":"3c60211d-f049-43ab-b32d-ea63f4bc8b7d","Type":"ContainerDied","Data":"029bd82769871d4ae90d07a4be8a60ebf046fafc94fb4a058defbe8e43e75c05"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.634181 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-gbd6q" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.634182 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="029bd82769871d4ae90d07a4be8a60ebf046fafc94fb4a058defbe8e43e75c05" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.636140 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d412-account-create-llfmj" event={"ID":"19be5c82-30a2-4ec4-8d8d-a087ad293e27","Type":"ContainerDied","Data":"16c759e0fc6f9d0a29146a5fa610e96768109512e3477d05c2df9ff54ff5c8bf"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.636168 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d412-account-create-llfmj" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.636569 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16c759e0fc6f9d0a29146a5fa610e96768109512e3477d05c2df9ff54ff5c8bf" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.638062 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r2pkk" event={"ID":"bb7b8ca6-8b45-432a-8b46-83efaf30fe09","Type":"ContainerDied","Data":"2e086a1a8e89676991eae53c28e0c5557b36401be2ae65801364cd90a64f31fb"} Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.638091 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e086a1a8e89676991eae53c28e0c5557b36401be2ae65801364cd90a64f31fb" Nov 25 16:02:32 crc kubenswrapper[4879]: I1125 16:02:32.638095 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r2pkk" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.002860 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k8bkz"] Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003633 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19be5c82-30a2-4ec4-8d8d-a087ad293e27" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003651 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="19be5c82-30a2-4ec4-8d8d-a087ad293e27" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003673 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2965a20-990b-495d-bfc0-97948383e941" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003682 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2965a20-990b-495d-bfc0-97948383e941" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003712 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7b8ca6-8b45-432a-8b46-83efaf30fe09" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003744 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7b8ca6-8b45-432a-8b46-83efaf30fe09" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003764 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4754f5d0-b42f-43e6-987d-f2b28dba3afd" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003772 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4754f5d0-b42f-43e6-987d-f2b28dba3afd" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003793 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c60211d-f049-43ab-b32d-ea63f4bc8b7d" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003801 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c60211d-f049-43ab-b32d-ea63f4bc8b7d" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: E1125 16:02:34.003818 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c525edb-f49c-4350-a3df-c487163f3d31" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.003828 4879 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="9c525edb-f49c-4350-a3df-c487163f3d31" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004030 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c525edb-f49c-4350-a3df-c487163f3d31" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004051 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb7b8ca6-8b45-432a-8b46-83efaf30fe09" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004068 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="19be5c82-30a2-4ec4-8d8d-a087ad293e27" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004085 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c60211d-f049-43ab-b32d-ea63f4bc8b7d" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004136 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4754f5d0-b42f-43e6-987d-f2b28dba3afd" containerName="mariadb-account-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004150 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2965a20-990b-495d-bfc0-97948383e941" containerName="mariadb-database-create" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.004911 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.007692 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vqp4g" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.009633 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.011067 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.040569 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k8bkz"] Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.106195 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.106390 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsvcv\" (UniqueName: \"kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.106465 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: 
I1125 16:02:34.106527 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.209096 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.209219 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.209312 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsvcv\" (UniqueName: \"kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.209367 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.213303 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.214378 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.219587 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.226064 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsvcv\" (UniqueName: \"kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv\") pod \"nova-cell0-conductor-db-sync-k8bkz\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc 
kubenswrapper[4879]: I1125 16:02:34.326455 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.563572 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k8bkz"] Nov 25 16:02:34 crc kubenswrapper[4879]: I1125 16:02:34.659082 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" event={"ID":"083a4b41-2786-4285-a0db-b16ad4840857","Type":"ContainerStarted","Data":"628e097b09c6676d15e3f17556b4c4a80294921e9dfac9c8d3ecedfd4f324978"} Nov 25 16:02:35 crc kubenswrapper[4879]: I1125 16:02:35.675983 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" event={"ID":"083a4b41-2786-4285-a0db-b16ad4840857","Type":"ContainerStarted","Data":"fbb7482b2235dae14390286d4fb6362c587b47ad9b54917eec3c6cdfb5e8bb26"} Nov 25 16:02:35 crc kubenswrapper[4879]: I1125 16:02:35.695212 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" podStartSLOduration=2.695194184 podStartE2EDuration="2.695194184s" podCreationTimestamp="2025-11-25 16:02:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:35.692583543 +0000 UTC m=+5847.295996624" watchObservedRunningTime="2025-11-25 16:02:35.695194184 +0000 UTC m=+5847.298607255" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.072155 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.075031 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.087362 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.200602 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.201099 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.201159 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85nxt\" (UniqueName: \"kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.303173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.303233 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85nxt\" (UniqueName: \"kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.303272 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.303831 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.303827 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.323041 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-85nxt\" (UniqueName: \"kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt\") pod \"community-operators-tbdgj\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.401137 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:39 crc kubenswrapper[4879]: I1125 16:02:39.925161 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:40 crc kubenswrapper[4879]: I1125 16:02:40.723082 4879 generic.go:334] "Generic (PLEG): container finished" podID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerID="7af51bb2c98c7592151316fdd569030d016b556d373c933b5981dce850bb1f7b" exitCode=0 Nov 25 16:02:40 crc kubenswrapper[4879]: I1125 16:02:40.723181 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerDied","Data":"7af51bb2c98c7592151316fdd569030d016b556d373c933b5981dce850bb1f7b"} Nov 25 16:02:40 crc kubenswrapper[4879]: I1125 16:02:40.723408 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerStarted","Data":"719d46ef0560d1cfaa30068fc488e15bf5aa8f2ac2d0ebb7c69cf1ce4f65fc87"} Nov 25 16:02:41 crc kubenswrapper[4879]: I1125 16:02:41.734694 4879 generic.go:334] "Generic (PLEG): container finished" podID="083a4b41-2786-4285-a0db-b16ad4840857" containerID="fbb7482b2235dae14390286d4fb6362c587b47ad9b54917eec3c6cdfb5e8bb26" exitCode=0 Nov 25 16:02:41 crc kubenswrapper[4879]: I1125 16:02:41.735088 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" event={"ID":"083a4b41-2786-4285-a0db-b16ad4840857","Type":"ContainerDied","Data":"fbb7482b2235dae14390286d4fb6362c587b47ad9b54917eec3c6cdfb5e8bb26"} Nov 25 16:02:41 crc kubenswrapper[4879]: I1125 16:02:41.738800 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerStarted","Data":"35b748eeb6c71a2ad3f30f2d3d96cb51424cbea1fd06fedf8f5b292db1748417"} Nov 25 16:02:42 crc kubenswrapper[4879]: I1125 16:02:42.748220 4879 generic.go:334] "Generic (PLEG): container finished" podID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerID="35b748eeb6c71a2ad3f30f2d3d96cb51424cbea1fd06fedf8f5b292db1748417" exitCode=0 Nov 25 16:02:42 crc kubenswrapper[4879]: I1125 16:02:42.748307 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerDied","Data":"35b748eeb6c71a2ad3f30f2d3d96cb51424cbea1fd06fedf8f5b292db1748417"} Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.063280 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.165775 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts\") pod \"083a4b41-2786-4285-a0db-b16ad4840857\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.166193 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle\") pod \"083a4b41-2786-4285-a0db-b16ad4840857\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.166239 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsvcv\" (UniqueName: \"kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv\") pod \"083a4b41-2786-4285-a0db-b16ad4840857\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.166284 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data\") pod \"083a4b41-2786-4285-a0db-b16ad4840857\" (UID: \"083a4b41-2786-4285-a0db-b16ad4840857\") " Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.171465 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv" (OuterVolumeSpecName: "kube-api-access-lsvcv") pod "083a4b41-2786-4285-a0db-b16ad4840857" (UID: "083a4b41-2786-4285-a0db-b16ad4840857"). InnerVolumeSpecName "kube-api-access-lsvcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.171492 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts" (OuterVolumeSpecName: "scripts") pod "083a4b41-2786-4285-a0db-b16ad4840857" (UID: "083a4b41-2786-4285-a0db-b16ad4840857"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.192071 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data" (OuterVolumeSpecName: "config-data") pod "083a4b41-2786-4285-a0db-b16ad4840857" (UID: "083a4b41-2786-4285-a0db-b16ad4840857"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.198952 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "083a4b41-2786-4285-a0db-b16ad4840857" (UID: "083a4b41-2786-4285-a0db-b16ad4840857"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.268619 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.268655 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.268669 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsvcv\" (UniqueName: \"kubernetes.io/projected/083a4b41-2786-4285-a0db-b16ad4840857-kube-api-access-lsvcv\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.268681 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083a4b41-2786-4285-a0db-b16ad4840857-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.758746 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.758736 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-k8bkz" event={"ID":"083a4b41-2786-4285-a0db-b16ad4840857","Type":"ContainerDied","Data":"628e097b09c6676d15e3f17556b4c4a80294921e9dfac9c8d3ecedfd4f324978"} Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.758874 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="628e097b09c6676d15e3f17556b4c4a80294921e9dfac9c8d3ecedfd4f324978" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.761289 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerStarted","Data":"8cb46bf6d2614b8f0e0c5df1e0ca7b718b6659fbec68c74827013ac2156bcfde"} Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.790697 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tbdgj" podStartSLOduration=2.279337732 podStartE2EDuration="4.790679902s" podCreationTimestamp="2025-11-25 16:02:39 +0000 UTC" firstStartedPulling="2025-11-25 16:02:40.726336011 +0000 UTC m=+5852.329749082" lastFinishedPulling="2025-11-25 16:02:43.237678181 +0000 UTC m=+5854.841091252" observedRunningTime="2025-11-25 16:02:43.777813716 +0000 UTC m=+5855.381226787" watchObservedRunningTime="2025-11-25 16:02:43.790679902 +0000 UTC m=+5855.394092973" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.817895 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:02:43 crc kubenswrapper[4879]: E1125 16:02:43.818312 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="083a4b41-2786-4285-a0db-b16ad4840857" containerName="nova-cell0-conductor-db-sync" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.818332 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="083a4b41-2786-4285-a0db-b16ad4840857" containerName="nova-cell0-conductor-db-sync" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.818526 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="083a4b41-2786-4285-a0db-b16ad4840857" containerName="nova-cell0-conductor-db-sync" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.819149 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.821318 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.823590 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vqp4g" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.831773 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.980357 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slwjq\" (UniqueName: \"kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.980734 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:43 crc kubenswrapper[4879]: I1125 16:02:43.980815 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.082526 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.082635 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slwjq\" (UniqueName: \"kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.082680 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.087717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.091460 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.098938 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slwjq\" (UniqueName: \"kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq\") pod \"nova-cell0-conductor-0\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.137369 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.578858 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:02:44 crc kubenswrapper[4879]: W1125 16:02:44.586479 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf5d8a4f_6f5f_4381_8282_abf6b2b2e627.slice/crio-b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815 WatchSource:0}: Error finding container b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815: Status 404 returned error can't find the container with id b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815 Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.645224 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:02:44 crc kubenswrapper[4879]: E1125 16:02:44.645437 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.770798 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627","Type":"ContainerStarted","Data":"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f"} Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.770867 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627","Type":"ContainerStarted","Data":"b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815"} Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.770883 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:44 crc kubenswrapper[4879]: I1125 16:02:44.788398 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.78838263 podStartE2EDuration="1.78838263s" podCreationTimestamp="2025-11-25 16:02:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:44.783953981 +0000 UTC m=+5856.387367052" watchObservedRunningTime="2025-11-25 
16:02:44.78838263 +0000 UTC m=+5856.391795701" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.164838 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.402384 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.402448 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.444312 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.579426 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-mnrtm"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.581276 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.584803 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.588990 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.593667 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mnrtm"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.676277 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.676413 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw7gh\" (UniqueName: \"kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.676445 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.676475 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.722141 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.727064 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.729797 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.734971 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.778478 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.778643 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw7gh\" (UniqueName: \"kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.778674 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.778699 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.791115 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.818851 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.824320 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.828038 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw7gh\" (UniqueName: \"kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh\") pod \"nova-cell0-cell-mapping-mnrtm\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.828141 4879 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.830195 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.833580 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.856532 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.868247 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.886282 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.900683 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94mdc\" (UniqueName: \"kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.900748 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.900823 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.900905 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.904040 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.908880 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:49 crc kubenswrapper[4879]: I1125 16:02:49.943049 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:49.992262 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002541 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002576 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj9cq\" (UniqueName: \"kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002601 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002652 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002677 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002705 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94mdc\" (UniqueName: \"kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002745 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002789 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc 
kubenswrapper[4879]: I1125 16:02:50.002812 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002848 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.002875 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb74c\" (UniqueName: \"kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.003422 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.007282 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.009786 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.010491 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.016299 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.020189 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94mdc\" (UniqueName: \"kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc\") pod \"nova-api-0\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.020580 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.024350 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.045597 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.075545 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.077402 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.086270 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.094924 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.105905 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106136 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106545 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106600 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106685 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb74c\" (UniqueName: \"kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106769 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp8f2\" (UniqueName: \"kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106807 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106832 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj9cq\" (UniqueName: \"kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.106990 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.107541 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.111563 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.117083 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.117245 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.119072 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.125630 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj9cq\" (UniqueName: \"kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq\") pod \"nova-scheduler-0\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.128169 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb74c\" (UniqueName: \"kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c\") pod \"nova-metadata-0\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209475 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209521 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209564 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tv2g\" (UniqueName: \"kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209600 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209621 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209708 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp8f2\" (UniqueName: \"kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209754 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.209791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.214935 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.215067 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.222704 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.228708 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp8f2\" (UniqueName: \"kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.233678 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.310947 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.311001 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.311066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tv2g\" (UniqueName: \"kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.311088 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.311140 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.312727 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.312903 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: 
\"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.313283 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.313645 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.333809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tv2g\" (UniqueName: \"kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g\") pod \"dnsmasq-dns-84c5b75c-qw96h\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.409245 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.425574 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.510979 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mnrtm"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.626244 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: W1125 16:02:50.637246 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f12e5a6_a8a9_4929_846f_e35a4665416e.slice/crio-411f9df1d19b9dbf4981059ce24a834393ca880549ae8eebf0ac5d51a8d95cad WatchSource:0}: Error finding container 411f9df1d19b9dbf4981059ce24a834393ca880549ae8eebf0ac5d51a8d95cad: Status 404 returned error can't find the container with id 411f9df1d19b9dbf4981059ce24a834393ca880549ae8eebf0ac5d51a8d95cad Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.641004 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmphj"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.642806 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.645827 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.649093 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.653007 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmphj"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.755717 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.822255 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq4tp\" (UniqueName: \"kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.822309 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.822332 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.822417 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.828596 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mnrtm" event={"ID":"cd000221-dd8b-4a66-9dfd-14d894fe8d37","Type":"ContainerStarted","Data":"e6f06990b782993a96b7d5ee073acf7985edfa0a52a06bcc03da16e98454835c"} Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.828643 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mnrtm" event={"ID":"cd000221-dd8b-4a66-9dfd-14d894fe8d37","Type":"ContainerStarted","Data":"c8cae868d19281cd09e952b17e99a21c33dd6d1f60abbea40f3484376ee1cc62"} Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.830463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerStarted","Data":"411f9df1d19b9dbf4981059ce24a834393ca880549ae8eebf0ac5d51a8d95cad"} Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.834829 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"8bdf2df1-26e5-4538-a012-f742d79f31ec","Type":"ContainerStarted","Data":"7f376bf5d7f0ca2b015a7c08541b22638ce80eab0ee168c473c7c7ebc65dcc19"} Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.845275 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.846944 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-mnrtm" podStartSLOduration=1.8469335949999999 podStartE2EDuration="1.846933595s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:50.846221956 +0000 UTC m=+5862.449635027" watchObservedRunningTime="2025-11-25 16:02:50.846933595 +0000 UTC m=+5862.450346656" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.924186 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq4tp\" (UniqueName: \"kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.924247 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.924285 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.924403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.938226 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.941080 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.942626 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: 
\"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.945522 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq4tp\" (UniqueName: \"kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp\") pod \"nova-cell1-conductor-db-sync-bmphj\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.952197 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:02:50 crc kubenswrapper[4879]: W1125 16:02:50.952614 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b1146ac_90b0_4934_8280_626f8842aa5a.slice/crio-a9ac65e09ec44fa52acf83a27558d04b1146002dc5eb478ba737aa7a93ff6117 WatchSource:0}: Error finding container a9ac65e09ec44fa52acf83a27558d04b1146002dc5eb478ba737aa7a93ff6117: Status 404 returned error can't find the container with id a9ac65e09ec44fa52acf83a27558d04b1146002dc5eb478ba737aa7a93ff6117 Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.962373 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:02:50 crc kubenswrapper[4879]: I1125 16:02:50.970208 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.432304 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmphj"] Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.845838 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3b1146ac-90b0-4934-8280-626f8842aa5a","Type":"ContainerStarted","Data":"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.846189 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3b1146ac-90b0-4934-8280-626f8842aa5a","Type":"ContainerStarted","Data":"a9ac65e09ec44fa52acf83a27558d04b1146002dc5eb478ba737aa7a93ff6117"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.848457 4879 generic.go:334] "Generic (PLEG): container finished" podID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerID="6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1" exitCode=0 Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.848542 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" event={"ID":"7c70edcb-70c0-4074-8e4b-45e6b0e3b171","Type":"ContainerDied","Data":"6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.848602 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" event={"ID":"7c70edcb-70c0-4074-8e4b-45e6b0e3b171","Type":"ContainerStarted","Data":"7729cfb48031c5cea4411fca13dc239addb36cbd1a4af0e4ac43a59272d55b43"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.851826 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerStarted","Data":"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1"} Nov 25 16:02:51 crc 
kubenswrapper[4879]: I1125 16:02:51.851874 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerStarted","Data":"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.854576 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8bdf2df1-26e5-4538-a012-f742d79f31ec","Type":"ContainerStarted","Data":"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.857635 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmphj" event={"ID":"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d","Type":"ContainerStarted","Data":"a98bf795693383c6b96bbbf33d40aafd54704a87d2152f5770b978eb6e4420c0"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.857695 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmphj" event={"ID":"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d","Type":"ContainerStarted","Data":"e958fb380c1b3ce3168c0b53e029ce1665f6828dbc131a2c83a29fd6d522a0fa"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.863660 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerStarted","Data":"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.863703 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerStarted","Data":"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.863713 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerStarted","Data":"5cd315eaae1cdb6b7ff57f62220cbf4fa366e7f5a95513476af03e855b288fbe"} Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.864648 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tbdgj" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="registry-server" containerID="cri-o://8cb46bf6d2614b8f0e0c5df1e0ca7b718b6659fbec68c74827013ac2156bcfde" gracePeriod=2 Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.885155 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.885131883 podStartE2EDuration="2.885131883s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:51.880149778 +0000 UTC m=+5863.483562869" watchObservedRunningTime="2025-11-25 16:02:51.885131883 +0000 UTC m=+5863.488544954" Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.934366 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-bmphj" podStartSLOduration=1.934347627 podStartE2EDuration="1.934347627s" podCreationTimestamp="2025-11-25 16:02:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:51.921170193 +0000 UTC m=+5863.524583274" 
watchObservedRunningTime="2025-11-25 16:02:51.934347627 +0000 UTC m=+5863.537760698" Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.943009 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.942987559 podStartE2EDuration="2.942987559s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:51.93559528 +0000 UTC m=+5863.539008351" watchObservedRunningTime="2025-11-25 16:02:51.942987559 +0000 UTC m=+5863.546400630" Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.961911 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.961890448 podStartE2EDuration="2.961890448s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:51.952589488 +0000 UTC m=+5863.556002559" watchObservedRunningTime="2025-11-25 16:02:51.961890448 +0000 UTC m=+5863.565303519" Nov 25 16:02:51 crc kubenswrapper[4879]: I1125 16:02:51.986767 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.986748847 podStartE2EDuration="2.986748847s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:51.97388112 +0000 UTC m=+5863.577294191" watchObservedRunningTime="2025-11-25 16:02:51.986748847 +0000 UTC m=+5863.590161918" Nov 25 16:02:52 crc kubenswrapper[4879]: I1125 16:02:52.882374 4879 generic.go:334] "Generic (PLEG): container finished" podID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerID="8cb46bf6d2614b8f0e0c5df1e0ca7b718b6659fbec68c74827013ac2156bcfde" exitCode=0 Nov 25 16:02:52 crc kubenswrapper[4879]: I1125 16:02:52.882590 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerDied","Data":"8cb46bf6d2614b8f0e0c5df1e0ca7b718b6659fbec68c74827013ac2156bcfde"} Nov 25 16:02:52 crc kubenswrapper[4879]: I1125 16:02:52.887696 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" event={"ID":"7c70edcb-70c0-4074-8e4b-45e6b0e3b171","Type":"ContainerStarted","Data":"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5"} Nov 25 16:02:52 crc kubenswrapper[4879]: I1125 16:02:52.915818 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" podStartSLOduration=3.915796997 podStartE2EDuration="3.915796997s" podCreationTimestamp="2025-11-25 16:02:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:52.908618794 +0000 UTC m=+5864.512031865" watchObservedRunningTime="2025-11-25 16:02:52.915796997 +0000 UTC m=+5864.519210068" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.133030 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.291206 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities\") pod \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.291334 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85nxt\" (UniqueName: \"kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt\") pod \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.291471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content\") pod \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\" (UID: \"a305c5c6-6936-4e3f-8d53-2748fa1a7646\") " Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.297879 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities" (OuterVolumeSpecName: "utilities") pod "a305c5c6-6936-4e3f-8d53-2748fa1a7646" (UID: "a305c5c6-6936-4e3f-8d53-2748fa1a7646"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.303624 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt" (OuterVolumeSpecName: "kube-api-access-85nxt") pod "a305c5c6-6936-4e3f-8d53-2748fa1a7646" (UID: "a305c5c6-6936-4e3f-8d53-2748fa1a7646"). InnerVolumeSpecName "kube-api-access-85nxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.345481 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a305c5c6-6936-4e3f-8d53-2748fa1a7646" (UID: "a305c5c6-6936-4e3f-8d53-2748fa1a7646"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.394189 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.394268 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85nxt\" (UniqueName: \"kubernetes.io/projected/a305c5c6-6936-4e3f-8d53-2748fa1a7646-kube-api-access-85nxt\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.394282 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a305c5c6-6936-4e3f-8d53-2748fa1a7646-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.919624 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tbdgj" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.920502 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tbdgj" event={"ID":"a305c5c6-6936-4e3f-8d53-2748fa1a7646","Type":"ContainerDied","Data":"719d46ef0560d1cfaa30068fc488e15bf5aa8f2ac2d0ebb7c69cf1ce4f65fc87"} Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.920557 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.920578 4879 scope.go:117] "RemoveContainer" containerID="8cb46bf6d2614b8f0e0c5df1e0ca7b718b6659fbec68c74827013ac2156bcfde" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.951678 4879 scope.go:117] "RemoveContainer" containerID="35b748eeb6c71a2ad3f30f2d3d96cb51424cbea1fd06fedf8f5b292db1748417" Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.951730 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.965572 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tbdgj"] Nov 25 16:02:53 crc kubenswrapper[4879]: I1125 16:02:53.987352 4879 scope.go:117] "RemoveContainer" containerID="7af51bb2c98c7592151316fdd569030d016b556d373c933b5981dce850bb1f7b" Nov 25 16:02:54 crc kubenswrapper[4879]: I1125 16:02:54.933746 4879 generic.go:334] "Generic (PLEG): container finished" podID="c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" containerID="a98bf795693383c6b96bbbf33d40aafd54704a87d2152f5770b978eb6e4420c0" exitCode=0 Nov 25 16:02:54 crc kubenswrapper[4879]: I1125 16:02:54.933846 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmphj" event={"ID":"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d","Type":"ContainerDied","Data":"a98bf795693383c6b96bbbf33d40aafd54704a87d2152f5770b978eb6e4420c0"} Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.223602 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.223967 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.233984 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.409467 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.656320 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" path="/var/lib/kubelet/pods/a305c5c6-6936-4e3f-8d53-2748fa1a7646/volumes" Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.946497 4879 generic.go:334] "Generic (PLEG): container finished" podID="cd000221-dd8b-4a66-9dfd-14d894fe8d37" containerID="e6f06990b782993a96b7d5ee073acf7985edfa0a52a06bcc03da16e98454835c" exitCode=0 Nov 25 16:02:55 crc kubenswrapper[4879]: I1125 16:02:55.946600 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mnrtm" event={"ID":"cd000221-dd8b-4a66-9dfd-14d894fe8d37","Type":"ContainerDied","Data":"e6f06990b782993a96b7d5ee073acf7985edfa0a52a06bcc03da16e98454835c"} Nov 25 16:02:56 crc 
kubenswrapper[4879]: I1125 16:02:56.261274 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.349836 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq4tp\" (UniqueName: \"kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp\") pod \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.349979 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts\") pod \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.350002 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle\") pod \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.350091 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data\") pod \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\" (UID: \"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d\") " Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.357368 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp" (OuterVolumeSpecName: "kube-api-access-vq4tp") pod "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" (UID: "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d"). InnerVolumeSpecName "kube-api-access-vq4tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.357906 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts" (OuterVolumeSpecName: "scripts") pod "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" (UID: "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.378568 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data" (OuterVolumeSpecName: "config-data") pod "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" (UID: "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.382616 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" (UID: "c54f8d8e-c352-4bb0-b0db-8afbdb6c944d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.452467 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq4tp\" (UniqueName: \"kubernetes.io/projected/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-kube-api-access-vq4tp\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.452507 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.452522 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.452534 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.957433 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmphj" event={"ID":"c54f8d8e-c352-4bb0-b0db-8afbdb6c944d","Type":"ContainerDied","Data":"e958fb380c1b3ce3168c0b53e029ce1665f6828dbc131a2c83a29fd6d522a0fa"} Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.957482 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e958fb380c1b3ce3168c0b53e029ce1665f6828dbc131a2c83a29fd6d522a0fa" Nov 25 16:02:56 crc kubenswrapper[4879]: I1125 16:02:56.957610 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmphj" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.027547 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:02:57 crc kubenswrapper[4879]: E1125 16:02:57.028099 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="extract-content" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028128 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="extract-content" Nov 25 16:02:57 crc kubenswrapper[4879]: E1125 16:02:57.028174 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="registry-server" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028185 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="registry-server" Nov 25 16:02:57 crc kubenswrapper[4879]: E1125 16:02:57.028207 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="extract-utilities" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028215 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="extract-utilities" Nov 25 16:02:57 crc kubenswrapper[4879]: E1125 16:02:57.028264 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" containerName="nova-cell1-conductor-db-sync" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028272 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" containerName="nova-cell1-conductor-db-sync" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028520 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305c5c6-6936-4e3f-8d53-2748fa1a7646" containerName="registry-server" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.028549 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" containerName="nova-cell1-conductor-db-sync" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.029580 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.031863 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.038448 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.165351 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkdht\" (UniqueName: \"kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.165488 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.165527 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.267389 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.267455 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.267626 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkdht\" (UniqueName: \"kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.274034 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.274810 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.283604 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkdht\" (UniqueName: \"kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht\") pod \"nova-cell1-conductor-0\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.354769 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.370043 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.470181 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw7gh\" (UniqueName: \"kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh\") pod \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.470500 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle\") pod \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.470583 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data\") pod \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.470665 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts\") pod \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\" (UID: \"cd000221-dd8b-4a66-9dfd-14d894fe8d37\") " Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.475341 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts" (OuterVolumeSpecName: "scripts") pod "cd000221-dd8b-4a66-9dfd-14d894fe8d37" (UID: "cd000221-dd8b-4a66-9dfd-14d894fe8d37"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.475449 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh" (OuterVolumeSpecName: "kube-api-access-fw7gh") pod "cd000221-dd8b-4a66-9dfd-14d894fe8d37" (UID: "cd000221-dd8b-4a66-9dfd-14d894fe8d37"). InnerVolumeSpecName "kube-api-access-fw7gh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.496332 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd000221-dd8b-4a66-9dfd-14d894fe8d37" (UID: "cd000221-dd8b-4a66-9dfd-14d894fe8d37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.496718 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data" (OuterVolumeSpecName: "config-data") pod "cd000221-dd8b-4a66-9dfd-14d894fe8d37" (UID: "cd000221-dd8b-4a66-9dfd-14d894fe8d37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.572404 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw7gh\" (UniqueName: \"kubernetes.io/projected/cd000221-dd8b-4a66-9dfd-14d894fe8d37-kube-api-access-fw7gh\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.572641 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.581777 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.581798 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd000221-dd8b-4a66-9dfd-14d894fe8d37-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.787237 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.969719 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ced1f22f-b283-43b3-a85a-e591ff1dbb27","Type":"ContainerStarted","Data":"666b9994e53fb06538c80a83678bf387cebfa43ef1b6ae158ff69ade37c21dcb"} Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.969772 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ced1f22f-b283-43b3-a85a-e591ff1dbb27","Type":"ContainerStarted","Data":"8e35b49d16b21335bca4972a6bc46afcfffef4f114729e1664026ec8c31322c3"} Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.969855 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.972917 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mnrtm" event={"ID":"cd000221-dd8b-4a66-9dfd-14d894fe8d37","Type":"ContainerDied","Data":"c8cae868d19281cd09e952b17e99a21c33dd6d1f60abbea40f3484376ee1cc62"} Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.973280 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8cae868d19281cd09e952b17e99a21c33dd6d1f60abbea40f3484376ee1cc62" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.972973 4879 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mnrtm" Nov 25 16:02:57 crc kubenswrapper[4879]: I1125 16:02:57.985230 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=0.985213585 podStartE2EDuration="985.213585ms" podCreationTimestamp="2025-11-25 16:02:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:02:57.983034766 +0000 UTC m=+5869.586447837" watchObservedRunningTime="2025-11-25 16:02:57.985213585 +0000 UTC m=+5869.588626656" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.137483 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.137727 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-log" containerID="cri-o://be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" gracePeriod=30 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.137829 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-api" containerID="cri-o://2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" gracePeriod=30 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.157901 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.158110 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8bdf2df1-26e5-4538-a012-f742d79f31ec" containerName="nova-scheduler-scheduler" containerID="cri-o://c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f" gracePeriod=30 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.200569 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.200794 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-log" containerID="cri-o://3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" gracePeriod=30 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.200942 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-metadata" containerID="cri-o://4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" gracePeriod=30 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.638486 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.648934 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:02:58 crc kubenswrapper[4879]: E1125 16:02:58.649921 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.729273 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.805033 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94mdc\" (UniqueName: \"kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc\") pod \"9f12e5a6-a8a9-4929-846f-e35a4665416e\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.805458 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data\") pod \"9f12e5a6-a8a9-4929-846f-e35a4665416e\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.805961 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle\") pod \"9f12e5a6-a8a9-4929-846f-e35a4665416e\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.806467 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs\") pod \"9f12e5a6-a8a9-4929-846f-e35a4665416e\" (UID: \"9f12e5a6-a8a9-4929-846f-e35a4665416e\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.807083 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs" (OuterVolumeSpecName: "logs") pod "9f12e5a6-a8a9-4929-846f-e35a4665416e" (UID: "9f12e5a6-a8a9-4929-846f-e35a4665416e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.811055 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f12e5a6-a8a9-4929-846f-e35a4665416e-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.814899 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc" (OuterVolumeSpecName: "kube-api-access-94mdc") pod "9f12e5a6-a8a9-4929-846f-e35a4665416e" (UID: "9f12e5a6-a8a9-4929-846f-e35a4665416e"). InnerVolumeSpecName "kube-api-access-94mdc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.836459 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f12e5a6-a8a9-4929-846f-e35a4665416e" (UID: "9f12e5a6-a8a9-4929-846f-e35a4665416e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.842438 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data" (OuterVolumeSpecName: "config-data") pod "9f12e5a6-a8a9-4929-846f-e35a4665416e" (UID: "9f12e5a6-a8a9-4929-846f-e35a4665416e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.912734 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle\") pod \"5b541837-b390-427f-b7c0-7b72d64aa30d\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.912920 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data\") pod \"5b541837-b390-427f-b7c0-7b72d64aa30d\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913019 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs\") pod \"5b541837-b390-427f-b7c0-7b72d64aa30d\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913057 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb74c\" (UniqueName: \"kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c\") pod \"5b541837-b390-427f-b7c0-7b72d64aa30d\" (UID: \"5b541837-b390-427f-b7c0-7b72d64aa30d\") " Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913553 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913579 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f12e5a6-a8a9-4929-846f-e35a4665416e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913594 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94mdc\" (UniqueName: \"kubernetes.io/projected/9f12e5a6-a8a9-4929-846f-e35a4665416e-kube-api-access-94mdc\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.913807 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs" (OuterVolumeSpecName: "logs") pod "5b541837-b390-427f-b7c0-7b72d64aa30d" (UID: "5b541837-b390-427f-b7c0-7b72d64aa30d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.916452 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c" (OuterVolumeSpecName: "kube-api-access-xb74c") pod "5b541837-b390-427f-b7c0-7b72d64aa30d" (UID: "5b541837-b390-427f-b7c0-7b72d64aa30d"). InnerVolumeSpecName "kube-api-access-xb74c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.936637 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data" (OuterVolumeSpecName: "config-data") pod "5b541837-b390-427f-b7c0-7b72d64aa30d" (UID: "5b541837-b390-427f-b7c0-7b72d64aa30d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.939309 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b541837-b390-427f-b7c0-7b72d64aa30d" (UID: "5b541837-b390-427f-b7c0-7b72d64aa30d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.985942 4879 generic.go:334] "Generic (PLEG): container finished" podID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerID="2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" exitCode=0 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.986619 4879 generic.go:334] "Generic (PLEG): container finished" podID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerID="be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" exitCode=143 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.986384 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.986447 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerDied","Data":"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1"} Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.987142 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerDied","Data":"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4"} Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.987166 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9f12e5a6-a8a9-4929-846f-e35a4665416e","Type":"ContainerDied","Data":"411f9df1d19b9dbf4981059ce24a834393ca880549ae8eebf0ac5d51a8d95cad"} Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.987214 4879 scope.go:117] "RemoveContainer" containerID="2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.991721 4879 generic.go:334] "Generic (PLEG): container finished" podID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerID="4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" exitCode=0 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.991763 4879 generic.go:334] "Generic (PLEG): container finished" podID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerID="3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" exitCode=143 Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.992099 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.992739 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerDied","Data":"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c"} Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.992775 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerDied","Data":"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf"} Nov 25 16:02:58 crc kubenswrapper[4879]: I1125 16:02:58.992790 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5b541837-b390-427f-b7c0-7b72d64aa30d","Type":"ContainerDied","Data":"5cd315eaae1cdb6b7ff57f62220cbf4fa366e7f5a95513476af03e855b288fbe"} Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.010886 4879 scope.go:117] "RemoveContainer" containerID="be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.016264 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b541837-b390-427f-b7c0-7b72d64aa30d-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.016310 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb74c\" (UniqueName: \"kubernetes.io/projected/5b541837-b390-427f-b7c0-7b72d64aa30d-kube-api-access-xb74c\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.016322 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.016333 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b541837-b390-427f-b7c0-7b72d64aa30d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.057601 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.062571 4879 scope.go:117] "RemoveContainer" containerID="2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.063172 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1\": container with ID starting with 2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1 not found: ID does not exist" containerID="2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.063217 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1"} err="failed to get container status \"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1\": rpc error: code = NotFound desc = could not find container \"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1\": container with ID starting with 2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1 not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.063247 4879 scope.go:117] "RemoveContainer" containerID="be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.063846 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4\": container with ID starting with be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4 not found: ID does not exist" containerID="be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.063916 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4"} err="failed to get container status \"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4\": rpc error: code = NotFound desc = could not find container \"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4\": container with ID starting with be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4 not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.063949 4879 scope.go:117] "RemoveContainer" containerID="2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.064599 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1"} err="failed to get container status \"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1\": rpc error: code = NotFound desc 
= could not find container \"2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1\": container with ID starting with 2f9229c05f93056f7bff0a773df65ac17ecd40f76e0b031ad71558a5b8569ee1 not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.064658 4879 scope.go:117] "RemoveContainer" containerID="be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.065195 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4"} err="failed to get container status \"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4\": rpc error: code = NotFound desc = could not find container \"be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4\": container with ID starting with be6ca23e6375dc3c2bc67bfa56b7e88bc0aa47968ed3e43a197b3e6f76a778c4 not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.065225 4879 scope.go:117] "RemoveContainer" containerID="4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.080778 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.105806 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.113825 4879 scope.go:117] "RemoveContainer" containerID="3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.116561 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.117156 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-log" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.117176 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-log" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.117199 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-metadata" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.117208 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-metadata" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.117227 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd000221-dd8b-4a66-9dfd-14d894fe8d37" containerName="nova-manage" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.117235 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd000221-dd8b-4a66-9dfd-14d894fe8d37" containerName="nova-manage" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.117260 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-api" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.117269 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-api" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.117277 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" 
containerName="nova-metadata-log" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.117286 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-log" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.118112 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-log" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.118191 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd000221-dd8b-4a66-9dfd-14d894fe8d37" containerName="nova-manage" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.118209 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-api" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.118225 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" containerName="nova-metadata-metadata" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.118239 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" containerName="nova-api-log" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.120351 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.124476 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.139952 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.150263 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.152529 4879 scope.go:117] "RemoveContainer" containerID="4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.153880 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c\": container with ID starting with 4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c not found: ID does not exist" containerID="4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.154109 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c"} err="failed to get container status \"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c\": rpc error: code = NotFound desc = could not find container \"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c\": container with ID starting with 4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.154169 4879 scope.go:117] "RemoveContainer" containerID="3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" Nov 25 16:02:59 crc kubenswrapper[4879]: E1125 16:02:59.155014 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf\": container 
with ID starting with 3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf not found: ID does not exist" containerID="3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.155069 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf"} err="failed to get container status \"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf\": rpc error: code = NotFound desc = could not find container \"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf\": container with ID starting with 3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.155113 4879 scope.go:117] "RemoveContainer" containerID="4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.156592 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c"} err="failed to get container status \"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c\": rpc error: code = NotFound desc = could not find container \"4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c\": container with ID starting with 4aacdbdf285dba2c1b62c10375d220f1e03b3e0d3a04f1773ff5a875d1db089c not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.156669 4879 scope.go:117] "RemoveContainer" containerID="3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.158505 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf"} err="failed to get container status \"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf\": rpc error: code = NotFound desc = could not find container \"3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf\": container with ID starting with 3e443c92d9bad6cc9d9eb4ee41ea3f52556f0c192f6d7e0a3b29019a35a1faaf not found: ID does not exist" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.160197 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.165298 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.168291 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.173763 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.218891 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.219018 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.219150 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dnrz\" (UniqueName: \"kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.219222 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.330507 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dnrz\" (UniqueName: \"kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.330898 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331062 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331226 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331351 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331507 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5brk\" (UniqueName: \"kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.331963 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.332369 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.335785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.335782 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.348697 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dnrz\" (UniqueName: \"kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz\") pod \"nova-api-0\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.434238 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.434300 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.434355 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5brk\" (UniqueName: 
\"kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.434413 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.435031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.438832 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.439521 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.442841 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.454511 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5brk\" (UniqueName: \"kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk\") pod \"nova-metadata-0\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.484245 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.679145 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b541837-b390-427f-b7c0-7b72d64aa30d" path="/var/lib/kubelet/pods/5b541837-b390-427f-b7c0-7b72d64aa30d/volumes" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.682549 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f12e5a6-a8a9-4929-846f-e35a4665416e" path="/var/lib/kubelet/pods/9f12e5a6-a8a9-4929-846f-e35a4665416e/volumes" Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.877363 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:02:59 crc kubenswrapper[4879]: W1125 16:02:59.887050 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod244ad18f_c6af_45ca_b51a_6255c10f2bac.slice/crio-8c10b97833dbe6fa0f81841b96c7d08e3803499f262a56a25bfb90d51f911c48 WatchSource:0}: Error finding container 8c10b97833dbe6fa0f81841b96c7d08e3803499f262a56a25bfb90d51f911c48: Status 404 returned error can't find the container with id 8c10b97833dbe6fa0f81841b96c7d08e3803499f262a56a25bfb90d51f911c48 Nov 25 16:02:59 crc kubenswrapper[4879]: I1125 16:02:59.967633 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.004312 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerStarted","Data":"0a8c60814aa77928dcdc983338335b717e4d841c26b7c4db78ce3199c22851d1"} Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.005189 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerStarted","Data":"8c10b97833dbe6fa0f81841b96c7d08e3803499f262a56a25bfb90d51f911c48"} Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.410195 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.421101 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.427157 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.502041 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:03:00 crc kubenswrapper[4879]: I1125 16:03:00.502624 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="dnsmasq-dns" containerID="cri-o://d1add8973062932288eb52a3078bd836a5933523ff285e2b58257efb51f159f2" gracePeriod=10 Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.016836 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerStarted","Data":"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c"} Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.016893 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerStarted","Data":"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d"} Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.019017 4879 generic.go:334] "Generic (PLEG): container finished" podID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerID="d1add8973062932288eb52a3078bd836a5933523ff285e2b58257efb51f159f2" exitCode=0 Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.019085 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" event={"ID":"9d1886cd-560a-4c14-9e9d-286cbcef59d1","Type":"ContainerDied","Data":"d1add8973062932288eb52a3078bd836a5933523ff285e2b58257efb51f159f2"} Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.020784 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerStarted","Data":"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46"} Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.020904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerStarted","Data":"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98"} Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.034183 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.049172 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.049133485 podStartE2EDuration="2.049133485s" podCreationTimestamp="2025-11-25 16:02:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:01.040822371 +0000 UTC m=+5872.644235452" watchObservedRunningTime="2025-11-25 16:03:01.049133485 +0000 UTC m=+5872.652546556" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.091902 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.091877035 podStartE2EDuration="2.091877035s" podCreationTimestamp="2025-11-25 16:02:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:01.077446466 +0000 UTC m=+5872.680859537" watchObservedRunningTime="2025-11-25 16:03:01.091877035 +0000 UTC m=+5872.695290106" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.470866 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.574789 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb\") pod \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.574840 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config\") pod \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.574890 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb\") pod \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.574963 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc\") pod \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.575139 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7m5j\" (UniqueName: \"kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j\") pod \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\" (UID: \"9d1886cd-560a-4c14-9e9d-286cbcef59d1\") " Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.580540 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j" (OuterVolumeSpecName: "kube-api-access-q7m5j") pod "9d1886cd-560a-4c14-9e9d-286cbcef59d1" (UID: "9d1886cd-560a-4c14-9e9d-286cbcef59d1"). InnerVolumeSpecName "kube-api-access-q7m5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.621823 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9d1886cd-560a-4c14-9e9d-286cbcef59d1" (UID: "9d1886cd-560a-4c14-9e9d-286cbcef59d1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.624157 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9d1886cd-560a-4c14-9e9d-286cbcef59d1" (UID: "9d1886cd-560a-4c14-9e9d-286cbcef59d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.625253 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9d1886cd-560a-4c14-9e9d-286cbcef59d1" (UID: "9d1886cd-560a-4c14-9e9d-286cbcef59d1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.637098 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config" (OuterVolumeSpecName: "config") pod "9d1886cd-560a-4c14-9e9d-286cbcef59d1" (UID: "9d1886cd-560a-4c14-9e9d-286cbcef59d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.676793 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7m5j\" (UniqueName: \"kubernetes.io/projected/9d1886cd-560a-4c14-9e9d-286cbcef59d1-kube-api-access-q7m5j\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.676833 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.676847 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.676860 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:01 crc kubenswrapper[4879]: I1125 16:03:01.676872 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d1886cd-560a-4c14-9e9d-286cbcef59d1-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.031806 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" event={"ID":"9d1886cd-560a-4c14-9e9d-286cbcef59d1","Type":"ContainerDied","Data":"5cf094ed7eb456db9fef6783a089d45f8d02508748a30164f473c0b12bd9f1a5"} Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.031892 4879 scope.go:117] "RemoveContainer" containerID="d1add8973062932288eb52a3078bd836a5933523ff285e2b58257efb51f159f2" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.032022 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f969fd95-tx5c6" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.071688 4879 scope.go:117] "RemoveContainer" containerID="153fcb577e6424b89530e3f9ac004f03c782ce37e1dcf77c435e4ab144420b2d" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.072576 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.081972 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f969fd95-tx5c6"] Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.382438 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.841656 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-jdgpb"] Nov 25 16:03:02 crc kubenswrapper[4879]: E1125 16:03:02.842262 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="dnsmasq-dns" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.842289 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="dnsmasq-dns" Nov 25 16:03:02 crc kubenswrapper[4879]: E1125 16:03:02.842330 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="init" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.842339 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="init" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.842599 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" containerName="dnsmasq-dns" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.843452 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.847183 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.847696 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.864766 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jdgpb"] Nov 25 16:03:02 crc kubenswrapper[4879]: I1125 16:03:02.997158 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.006564 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.006718 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztx6n\" (UniqueName: \"kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.006768 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.006873 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.049305 4879 generic.go:334] "Generic (PLEG): container finished" podID="8bdf2df1-26e5-4538-a012-f742d79f31ec" containerID="c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f" exitCode=0 Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.049357 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8bdf2df1-26e5-4538-a012-f742d79f31ec","Type":"ContainerDied","Data":"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f"} Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.049388 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8bdf2df1-26e5-4538-a012-f742d79f31ec","Type":"ContainerDied","Data":"7f376bf5d7f0ca2b015a7c08541b22638ce80eab0ee168c473c7c7ebc65dcc19"} Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.049410 4879 scope.go:117] "RemoveContainer" containerID="c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.049566 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.085211 4879 scope.go:117] "RemoveContainer" containerID="c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f" Nov 25 16:03:03 crc kubenswrapper[4879]: E1125 16:03:03.086172 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f\": container with ID starting with c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f not found: ID does not exist" containerID="c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.086223 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f"} err="failed to get container status \"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f\": rpc error: code = NotFound desc = could not find container \"c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f\": container with ID starting with c2bbca5486ef89f904e3ecd017d3239b071e736e09d39a6989f28509174c884f not found: ID does not exist" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108026 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data\") pod \"8bdf2df1-26e5-4538-a012-f742d79f31ec\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108342 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle\") pod \"8bdf2df1-26e5-4538-a012-f742d79f31ec\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108387 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj9cq\" (UniqueName: \"kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq\") pod \"8bdf2df1-26e5-4538-a012-f742d79f31ec\" (UID: \"8bdf2df1-26e5-4538-a012-f742d79f31ec\") " Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108624 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108732 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztx6n\" (UniqueName: \"kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108767 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 
16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.108812 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.117898 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.118026 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.118258 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.119194 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq" (OuterVolumeSpecName: "kube-api-access-nj9cq") pod "8bdf2df1-26e5-4538-a012-f742d79f31ec" (UID: "8bdf2df1-26e5-4538-a012-f742d79f31ec"). InnerVolumeSpecName "kube-api-access-nj9cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.128614 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztx6n\" (UniqueName: \"kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n\") pod \"nova-cell1-cell-mapping-jdgpb\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.134720 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data" (OuterVolumeSpecName: "config-data") pod "8bdf2df1-26e5-4538-a012-f742d79f31ec" (UID: "8bdf2df1-26e5-4538-a012-f742d79f31ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.135637 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bdf2df1-26e5-4538-a012-f742d79f31ec" (UID: "8bdf2df1-26e5-4538-a012-f742d79f31ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.177055 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.210324 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.210542 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj9cq\" (UniqueName: \"kubernetes.io/projected/8bdf2df1-26e5-4538-a012-f742d79f31ec-kube-api-access-nj9cq\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.210657 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bdf2df1-26e5-4538-a012-f742d79f31ec-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.388887 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.406384 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.416406 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:03 crc kubenswrapper[4879]: E1125 16:03:03.416950 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bdf2df1-26e5-4538-a012-f742d79f31ec" containerName="nova-scheduler-scheduler" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.416977 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bdf2df1-26e5-4538-a012-f742d79f31ec" containerName="nova-scheduler-scheduler" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.417267 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bdf2df1-26e5-4538-a012-f742d79f31ec" containerName="nova-scheduler-scheduler" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.418233 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.420945 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.434870 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.516260 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.516349 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzh25\" (UniqueName: \"kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.516429 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: W1125 16:03:03.585815 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30ceae1f_405c_440c_b3df_aa6a9289506c.slice/crio-65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521 WatchSource:0}: Error finding container 65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521: Status 404 returned error can't find the container with id 65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521 Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.587775 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-jdgpb"] Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.618281 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.618366 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzh25\" (UniqueName: \"kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.618434 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.622748 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.623317 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.634380 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzh25\" (UniqueName: \"kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25\") pod \"nova-scheduler-0\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.658254 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bdf2df1-26e5-4538-a012-f742d79f31ec" path="/var/lib/kubelet/pods/8bdf2df1-26e5-4538-a012-f742d79f31ec/volumes" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.659048 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d1886cd-560a-4c14-9e9d-286cbcef59d1" path="/var/lib/kubelet/pods/9d1886cd-560a-4c14-9e9d-286cbcef59d1/volumes" Nov 25 16:03:03 crc kubenswrapper[4879]: I1125 16:03:03.753040 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.060068 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jdgpb" event={"ID":"30ceae1f-405c-440c-b3df-aa6a9289506c","Type":"ContainerStarted","Data":"e799b8b8e7655f8eece1ac632a23c39558a9b1a65eb5b90dc76c482d9b5d8c26"} Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.060373 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jdgpb" event={"ID":"30ceae1f-405c-440c-b3df-aa6a9289506c","Type":"ContainerStarted","Data":"65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521"} Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.086081 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-jdgpb" podStartSLOduration=2.086062718 podStartE2EDuration="2.086062718s" podCreationTimestamp="2025-11-25 16:03:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:04.074277611 +0000 UTC m=+5875.677690682" watchObservedRunningTime="2025-11-25 16:03:04.086062718 +0000 UTC m=+5875.689475789" Nov 25 16:03:04 crc kubenswrapper[4879]: W1125 16:03:04.182269 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3929460c_3ac0_4a77_b96f_2177ab7bebef.slice/crio-4c10f0aac7d095013e834667314692bfd38c997c3386d54b7c981409ae71ef99 WatchSource:0}: Error finding container 4c10f0aac7d095013e834667314692bfd38c997c3386d54b7c981409ae71ef99: Status 404 returned error can't find the container with id 4c10f0aac7d095013e834667314692bfd38c997c3386d54b7c981409ae71ef99 Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.182415 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.485237 4879 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:03:04 crc kubenswrapper[4879]: I1125 16:03:04.485554 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:03:05 crc kubenswrapper[4879]: I1125 16:03:05.078216 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3929460c-3ac0-4a77-b96f-2177ab7bebef","Type":"ContainerStarted","Data":"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c"} Nov 25 16:03:05 crc kubenswrapper[4879]: I1125 16:03:05.078250 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3929460c-3ac0-4a77-b96f-2177ab7bebef","Type":"ContainerStarted","Data":"4c10f0aac7d095013e834667314692bfd38c997c3386d54b7c981409ae71ef99"} Nov 25 16:03:05 crc kubenswrapper[4879]: I1125 16:03:05.095313 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.095291806 podStartE2EDuration="2.095291806s" podCreationTimestamp="2025-11-25 16:03:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:05.093324383 +0000 UTC m=+5876.696737444" watchObservedRunningTime="2025-11-25 16:03:05.095291806 +0000 UTC m=+5876.698704877" Nov 25 16:03:08 crc kubenswrapper[4879]: I1125 16:03:08.753884 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 16:03:09 crc kubenswrapper[4879]: I1125 16:03:09.443903 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:03:09 crc kubenswrapper[4879]: I1125 16:03:09.443967 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:03:09 crc kubenswrapper[4879]: I1125 16:03:09.485339 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:03:09 crc kubenswrapper[4879]: I1125 16:03:09.485396 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.130852 4879 generic.go:334] "Generic (PLEG): container finished" podID="30ceae1f-405c-440c-b3df-aa6a9289506c" containerID="e799b8b8e7655f8eece1ac632a23c39558a9b1a65eb5b90dc76c482d9b5d8c26" exitCode=0 Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.130898 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jdgpb" event={"ID":"30ceae1f-405c-440c-b3df-aa6a9289506c","Type":"ContainerDied","Data":"e799b8b8e7655f8eece1ac632a23c39558a9b1a65eb5b90dc76c482d9b5d8c26"} Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.608409 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.75:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.608415 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.76:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting 
headers)" Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.608686 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.76:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.608711 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.75:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:10 crc kubenswrapper[4879]: I1125 16:03:10.644752 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:03:10 crc kubenswrapper[4879]: E1125 16:03:10.645061 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.488083 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.583985 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle\") pod \"30ceae1f-405c-440c-b3df-aa6a9289506c\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.584067 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data\") pod \"30ceae1f-405c-440c-b3df-aa6a9289506c\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.584316 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztx6n\" (UniqueName: \"kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n\") pod \"30ceae1f-405c-440c-b3df-aa6a9289506c\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.584351 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts\") pod \"30ceae1f-405c-440c-b3df-aa6a9289506c\" (UID: \"30ceae1f-405c-440c-b3df-aa6a9289506c\") " Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.591450 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts" (OuterVolumeSpecName: "scripts") pod "30ceae1f-405c-440c-b3df-aa6a9289506c" (UID: "30ceae1f-405c-440c-b3df-aa6a9289506c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.605297 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n" (OuterVolumeSpecName: "kube-api-access-ztx6n") pod "30ceae1f-405c-440c-b3df-aa6a9289506c" (UID: "30ceae1f-405c-440c-b3df-aa6a9289506c"). InnerVolumeSpecName "kube-api-access-ztx6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.612603 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30ceae1f-405c-440c-b3df-aa6a9289506c" (UID: "30ceae1f-405c-440c-b3df-aa6a9289506c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.614753 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data" (OuterVolumeSpecName: "config-data") pod "30ceae1f-405c-440c-b3df-aa6a9289506c" (UID: "30ceae1f-405c-440c-b3df-aa6a9289506c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.687200 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.687275 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.687286 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztx6n\" (UniqueName: \"kubernetes.io/projected/30ceae1f-405c-440c-b3df-aa6a9289506c-kube-api-access-ztx6n\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:11 crc kubenswrapper[4879]: I1125 16:03:11.687295 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30ceae1f-405c-440c-b3df-aa6a9289506c-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.150866 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-jdgpb" event={"ID":"30ceae1f-405c-440c-b3df-aa6a9289506c","Type":"ContainerDied","Data":"65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521"} Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.150910 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65fbfaef3954f3ad8be1e44e7c88d8c9d32f5dbd0eff99495905894e7245f521" Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.150934 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-jdgpb" Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.332185 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.332417 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-log" containerID="cri-o://685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d" gracePeriod=30 Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.332485 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-api" containerID="cri-o://d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c" gracePeriod=30 Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.385427 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.385634 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3929460c-3ac0-4a77-b96f-2177ab7bebef" containerName="nova-scheduler-scheduler" containerID="cri-o://466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c" gracePeriod=30 Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.401640 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.401850 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-log" containerID="cri-o://eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98" gracePeriod=30 Nov 25 16:03:12 crc kubenswrapper[4879]: I1125 16:03:12.402045 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-metadata" containerID="cri-o://798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46" gracePeriod=30 Nov 25 16:03:13 crc kubenswrapper[4879]: I1125 16:03:13.163381 4879 generic.go:334] "Generic (PLEG): container finished" podID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerID="eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98" exitCode=143 Nov 25 16:03:13 crc kubenswrapper[4879]: I1125 16:03:13.163473 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerDied","Data":"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98"} Nov 25 16:03:13 crc kubenswrapper[4879]: I1125 16:03:13.165068 4879 generic.go:334] "Generic (PLEG): container finished" podID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerID="685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d" exitCode=143 Nov 25 16:03:13 crc kubenswrapper[4879]: I1125 16:03:13.165117 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerDied","Data":"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d"} Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.845053 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.961400 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle\") pod \"3929460c-3ac0-4a77-b96f-2177ab7bebef\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.961536 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzh25\" (UniqueName: \"kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25\") pod \"3929460c-3ac0-4a77-b96f-2177ab7bebef\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.961567 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data\") pod \"3929460c-3ac0-4a77-b96f-2177ab7bebef\" (UID: \"3929460c-3ac0-4a77-b96f-2177ab7bebef\") " Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.966613 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25" (OuterVolumeSpecName: "kube-api-access-nzh25") pod "3929460c-3ac0-4a77-b96f-2177ab7bebef" (UID: "3929460c-3ac0-4a77-b96f-2177ab7bebef"). InnerVolumeSpecName "kube-api-access-nzh25". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.971833 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.981860 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.987856 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data" (OuterVolumeSpecName: "config-data") pod "3929460c-3ac0-4a77-b96f-2177ab7bebef" (UID: "3929460c-3ac0-4a77-b96f-2177ab7bebef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:15 crc kubenswrapper[4879]: I1125 16:03:15.995527 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3929460c-3ac0-4a77-b96f-2177ab7bebef" (UID: "3929460c-3ac0-4a77-b96f-2177ab7bebef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.062800 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data\") pod \"244ad18f-c6af-45ca-b51a-6255c10f2bac\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063116 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs\") pod \"244ad18f-c6af-45ca-b51a-6255c10f2bac\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063195 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle\") pod \"244ad18f-c6af-45ca-b51a-6255c10f2bac\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063302 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5brk\" (UniqueName: \"kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk\") pod \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063343 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dnrz\" (UniqueName: \"kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz\") pod \"244ad18f-c6af-45ca-b51a-6255c10f2bac\" (UID: \"244ad18f-c6af-45ca-b51a-6255c10f2bac\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063360 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs\") pod \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063397 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle\") pod \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063435 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data\") pod \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\" (UID: \"daef3196-0aa9-41d8-92e8-e67bb26b4ec5\") " Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.063738 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs" (OuterVolumeSpecName: "logs") pod "244ad18f-c6af-45ca-b51a-6255c10f2bac" (UID: "244ad18f-c6af-45ca-b51a-6255c10f2bac"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.064146 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs" (OuterVolumeSpecName: "logs") pod "daef3196-0aa9-41d8-92e8-e67bb26b4ec5" (UID: "daef3196-0aa9-41d8-92e8-e67bb26b4ec5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.064180 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/244ad18f-c6af-45ca-b51a-6255c10f2bac-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.064202 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.064218 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzh25\" (UniqueName: \"kubernetes.io/projected/3929460c-3ac0-4a77-b96f-2177ab7bebef-kube-api-access-nzh25\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.064231 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3929460c-3ac0-4a77-b96f-2177ab7bebef-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.066487 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk" (OuterVolumeSpecName: "kube-api-access-c5brk") pod "daef3196-0aa9-41d8-92e8-e67bb26b4ec5" (UID: "daef3196-0aa9-41d8-92e8-e67bb26b4ec5"). InnerVolumeSpecName "kube-api-access-c5brk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.066637 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz" (OuterVolumeSpecName: "kube-api-access-9dnrz") pod "244ad18f-c6af-45ca-b51a-6255c10f2bac" (UID: "244ad18f-c6af-45ca-b51a-6255c10f2bac"). InnerVolumeSpecName "kube-api-access-9dnrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.087214 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "daef3196-0aa9-41d8-92e8-e67bb26b4ec5" (UID: "daef3196-0aa9-41d8-92e8-e67bb26b4ec5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.087621 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "244ad18f-c6af-45ca-b51a-6255c10f2bac" (UID: "244ad18f-c6af-45ca-b51a-6255c10f2bac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.087658 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data" (OuterVolumeSpecName: "config-data") pod "daef3196-0aa9-41d8-92e8-e67bb26b4ec5" (UID: "daef3196-0aa9-41d8-92e8-e67bb26b4ec5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.091525 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data" (OuterVolumeSpecName: "config-data") pod "244ad18f-c6af-45ca-b51a-6255c10f2bac" (UID: "244ad18f-c6af-45ca-b51a-6255c10f2bac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165778 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165810 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5brk\" (UniqueName: \"kubernetes.io/projected/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-kube-api-access-c5brk\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165820 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dnrz\" (UniqueName: \"kubernetes.io/projected/244ad18f-c6af-45ca-b51a-6255c10f2bac-kube-api-access-9dnrz\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165829 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165837 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165847 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daef3196-0aa9-41d8-92e8-e67bb26b4ec5-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.165855 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244ad18f-c6af-45ca-b51a-6255c10f2bac-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.196348 4879 generic.go:334] "Generic (PLEG): container finished" podID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerID="d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c" exitCode=0 Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.196408 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.196419 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerDied","Data":"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.196530 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"244ad18f-c6af-45ca-b51a-6255c10f2bac","Type":"ContainerDied","Data":"8c10b97833dbe6fa0f81841b96c7d08e3803499f262a56a25bfb90d51f911c48"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.196557 4879 scope.go:117] "RemoveContainer" containerID="d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.197909 4879 generic.go:334] "Generic (PLEG): container finished" podID="3929460c-3ac0-4a77-b96f-2177ab7bebef" containerID="466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c" exitCode=0 Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.197985 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.198001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3929460c-3ac0-4a77-b96f-2177ab7bebef","Type":"ContainerDied","Data":"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.198019 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3929460c-3ac0-4a77-b96f-2177ab7bebef","Type":"ContainerDied","Data":"4c10f0aac7d095013e834667314692bfd38c997c3386d54b7c981409ae71ef99"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.200297 4879 generic.go:334] "Generic (PLEG): container finished" podID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerID="798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46" exitCode=0 Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.200326 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerDied","Data":"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.200346 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"daef3196-0aa9-41d8-92e8-e67bb26b4ec5","Type":"ContainerDied","Data":"0a8c60814aa77928dcdc983338335b717e4d841c26b7c4db78ce3199c22851d1"} Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.200394 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.222182 4879 scope.go:117] "RemoveContainer" containerID="685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.283821 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.289020 4879 scope.go:117] "RemoveContainer" containerID="d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.289478 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c\": container with ID starting with d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c not found: ID does not exist" containerID="d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.289519 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c"} err="failed to get container status \"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c\": rpc error: code = NotFound desc = could not find container \"d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c\": container with ID starting with d41ad2bfb358232a2b849096c570e0ad256d5c2504fe32e8d818eddf88655f7c not found: ID does not exist" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.289548 4879 scope.go:117] "RemoveContainer" containerID="685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.289896 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d\": container with ID starting with 685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d not found: ID does not exist" containerID="685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.289947 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d"} err="failed to get container status \"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d\": rpc error: code = NotFound desc = could not find container \"685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d\": container with ID starting with 685960d850c50ea65b3a79ba82f493afee83bbd7c6a9ad63861062239018a87d not found: ID does not exist" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.289981 4879 scope.go:117] "RemoveContainer" containerID="466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.306504 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.317917 4879 scope.go:117] "RemoveContainer" containerID="466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.318364 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c\": container with ID starting with 466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c not found: ID does not exist" containerID="466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.318393 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c"} err="failed to get container status \"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c\": rpc error: code = NotFound desc = could not find container \"466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c\": container with ID starting with 466d6b6f1938b60422074c4757286390214052e37b8c75dee85447f0fdb7827c not found: ID does not exist" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.318419 4879 scope.go:117] "RemoveContainer" containerID="798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.330642 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331104 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3929460c-3ac0-4a77-b96f-2177ab7bebef" containerName="nova-scheduler-scheduler" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331146 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3929460c-3ac0-4a77-b96f-2177ab7bebef" containerName="nova-scheduler-scheduler" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331162 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-metadata" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331171 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-metadata" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331185 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ceae1f-405c-440c-b3df-aa6a9289506c" containerName="nova-manage" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331193 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ceae1f-405c-440c-b3df-aa6a9289506c" containerName="nova-manage" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331210 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-log" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331219 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-log" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331247 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-api" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331254 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-api" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.331271 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-log" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331279 4879 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-log" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331446 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-log" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331463 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-metadata" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331477 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ceae1f-405c-440c-b3df-aa6a9289506c" containerName="nova-manage" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331495 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" containerName="nova-metadata-log" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331506 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" containerName="nova-api-api" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.331515 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3929460c-3ac0-4a77-b96f-2177ab7bebef" containerName="nova-scheduler-scheduler" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.332154 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.335067 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.343047 4879 scope.go:117] "RemoveContainer" containerID="eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.352254 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.361603 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.365806 4879 scope.go:117] "RemoveContainer" containerID="798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46" Nov 25 16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.366297 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46\": container with ID starting with 798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46 not found: ID does not exist" containerID="798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.366340 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46"} err="failed to get container status \"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46\": rpc error: code = NotFound desc = could not find container \"798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46\": container with ID starting with 798e421d8030225d3f24afab9e74bc0c61147874a954a88fb4e9007b1aa66f46 not found: ID does not exist" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.366370 4879 scope.go:117] "RemoveContainer" containerID="eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98" Nov 25 
16:03:16 crc kubenswrapper[4879]: E1125 16:03:16.366714 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98\": container with ID starting with eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98 not found: ID does not exist" containerID="eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.366744 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98"} err="failed to get container status \"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98\": rpc error: code = NotFound desc = could not find container \"eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98\": container with ID starting with eb3dd90f2f47fc77caff2967f9c544473839ccefc47f0a04a9a0f4be4932ba98 not found: ID does not exist" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.371097 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.379802 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.389738 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.391474 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.393452 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.398764 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.408658 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.417134 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.418828 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.421464 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.426276 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473206 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473262 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473306 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473352 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473513 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473572 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473604 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4lkv\" (UniqueName: \"kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473632 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87ckk\" (UniqueName: \"kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473710 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473736 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.473788 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v7kc\" (UniqueName: \"kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.575840 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v7kc\" (UniqueName: \"kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.575902 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.575932 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.575980 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576032 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576063 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576110 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4lkv\" (UniqueName: \"kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576250 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87ckk\" (UniqueName: \"kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576298 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576328 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576905 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.576985 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.580366 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.580830 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.581040 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.582236 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.582360 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.582261 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.593742 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4lkv\" (UniqueName: \"kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv\") pod \"nova-scheduler-0\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.594420 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v7kc\" (UniqueName: \"kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc\") pod \"nova-metadata-0\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.596772 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87ckk\" (UniqueName: \"kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk\") pod \"nova-api-0\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " pod="openstack/nova-api-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.653060 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.710655 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:03:16 crc kubenswrapper[4879]: I1125 16:03:16.734679 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.155186 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:03:17 crc kubenswrapper[4879]: W1125 16:03:17.158977 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c2b162_8b9a_4c6f_a29c_03a03beeb22c.slice/crio-1ee586b0f763a09664d8ba89ffa8900aa7456d8d4f319ab52ca0f30b871a0488 WatchSource:0}: Error finding container 1ee586b0f763a09664d8ba89ffa8900aa7456d8d4f319ab52ca0f30b871a0488: Status 404 returned error can't find the container with id 1ee586b0f763a09664d8ba89ffa8900aa7456d8d4f319ab52ca0f30b871a0488 Nov 25 16:03:17 crc kubenswrapper[4879]: W1125 16:03:17.161072 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4223a71e_c436_4808_b86a_971246d6ef92.slice/crio-4ec2bfe1ffe957bc9dbd4ed3394a9d5e918afe3d5628c452049bc2c92598164f WatchSource:0}: Error finding container 4ec2bfe1ffe957bc9dbd4ed3394a9d5e918afe3d5628c452049bc2c92598164f: Status 404 returned error can't find the container with id 4ec2bfe1ffe957bc9dbd4ed3394a9d5e918afe3d5628c452049bc2c92598164f Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.163351 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.213715 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerStarted","Data":"1ee586b0f763a09664d8ba89ffa8900aa7456d8d4f319ab52ca0f30b871a0488"} Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.215802 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4223a71e-c436-4808-b86a-971246d6ef92","Type":"ContainerStarted","Data":"4ec2bfe1ffe957bc9dbd4ed3394a9d5e918afe3d5628c452049bc2c92598164f"} Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.266702 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:03:17 crc kubenswrapper[4879]: W1125 16:03:17.273176 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc049fa46_6a96_405d_9cf7_a86978fdd705.slice/crio-ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec WatchSource:0}: Error finding container ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec: Status 404 returned error can't find the container with id ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.655431 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="244ad18f-c6af-45ca-b51a-6255c10f2bac" path="/var/lib/kubelet/pods/244ad18f-c6af-45ca-b51a-6255c10f2bac/volumes" Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.656364 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3929460c-3ac0-4a77-b96f-2177ab7bebef" path="/var/lib/kubelet/pods/3929460c-3ac0-4a77-b96f-2177ab7bebef/volumes" Nov 25 16:03:17 crc kubenswrapper[4879]: I1125 16:03:17.657029 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daef3196-0aa9-41d8-92e8-e67bb26b4ec5" path="/var/lib/kubelet/pods/daef3196-0aa9-41d8-92e8-e67bb26b4ec5/volumes" Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.230369 4879 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerStarted","Data":"52ebfc79e22cbc7a62153d5017b54e7dfef08229c54c4e6444748694c712a053"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.231697 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerStarted","Data":"eee429f2eb67288e9eabb20211fb4fd9768f7f5c6a556a75f790c7a5f3fd88e3"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.233865 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerStarted","Data":"c8d00db643bb70b03b1122054d30fd3cf5e95f9800ef4c151258068b1a56f710"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.233969 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerStarted","Data":"412dcc7cbf510b3ae43bbac4a484c394bdc03e83273b5bbd5f095b3569b7cf4b"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.234079 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerStarted","Data":"ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.249168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4223a71e-c436-4808-b86a-971246d6ef92","Type":"ContainerStarted","Data":"5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18"} Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.274759 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.27473848 podStartE2EDuration="2.27473848s" podCreationTimestamp="2025-11-25 16:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:18.264443972 +0000 UTC m=+5889.867857053" watchObservedRunningTime="2025-11-25 16:03:18.27473848 +0000 UTC m=+5889.878151551" Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.328563 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.328546267 podStartE2EDuration="2.328546267s" podCreationTimestamp="2025-11-25 16:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:18.32825637 +0000 UTC m=+5889.931669451" watchObservedRunningTime="2025-11-25 16:03:18.328546267 +0000 UTC m=+5889.931959338" Nov 25 16:03:18 crc kubenswrapper[4879]: I1125 16:03:18.353689 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.353674223 podStartE2EDuration="2.353674223s" podCreationTimestamp="2025-11-25 16:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:18.347909798 +0000 UTC m=+5889.951322859" watchObservedRunningTime="2025-11-25 16:03:18.353674223 +0000 UTC m=+5889.957087294" Nov 25 16:03:21 crc kubenswrapper[4879]: I1125 16:03:21.655564 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Nov 25 16:03:21 crc kubenswrapper[4879]: I1125 16:03:21.711623 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:03:21 crc kubenswrapper[4879]: I1125 16:03:21.711681 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:03:24 crc kubenswrapper[4879]: I1125 16:03:24.645389 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:03:24 crc kubenswrapper[4879]: E1125 16:03:24.645956 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.654447 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.682432 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.711619 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.711715 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.735705 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:03:26 crc kubenswrapper[4879]: I1125 16:03:26.735762 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:03:27 crc kubenswrapper[4879]: I1125 16:03:27.354671 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 16:03:27 crc kubenswrapper[4879]: I1125 16:03:27.877399 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:27 crc kubenswrapper[4879]: I1125 16:03:27.877414 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.81:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:27 crc kubenswrapper[4879]: I1125 16:03:27.877452 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:27 crc kubenswrapper[4879]: I1125 16:03:27.877474 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-api" 
probeResult="failure" output="Get \"http://10.217.1.81:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:03:35 crc kubenswrapper[4879]: I1125 16:03:35.644397 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:03:35 crc kubenswrapper[4879]: E1125 16:03:35.644974 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.714601 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.715141 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.717343 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.717432 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.739569 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.739966 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.740142 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:03:36 crc kubenswrapper[4879]: I1125 16:03:36.743661 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.420398 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.423820 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.603593 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.606417 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.625100 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.667005 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.667601 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.667754 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.667871 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6h4n\" (UniqueName: \"kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.668040 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.769504 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.769593 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.769679 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.769732 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-s6h4n\" (UniqueName: \"kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.769787 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.770655 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.770753 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.771377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.771489 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.791319 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6h4n\" (UniqueName: \"kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n\") pod \"dnsmasq-dns-67c7c5f9c9-ktnmr\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:37 crc kubenswrapper[4879]: I1125 16:03:37.939894 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:38 crc kubenswrapper[4879]: W1125 16:03:38.439562 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e57cd23_6502_4059_8d9f_99707a23b4b4.slice/crio-3ed8d79759cd26963d212c04a64490229e0c9c25ac84a1066e3082065d67cd16 WatchSource:0}: Error finding container 3ed8d79759cd26963d212c04a64490229e0c9c25ac84a1066e3082065d67cd16: Status 404 returned error can't find the container with id 3ed8d79759cd26963d212c04a64490229e0c9c25ac84a1066e3082065d67cd16 Nov 25 16:03:38 crc kubenswrapper[4879]: I1125 16:03:38.439890 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:03:39 crc kubenswrapper[4879]: I1125 16:03:39.440762 4879 generic.go:334] "Generic (PLEG): container finished" podID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerID="0705b3b9219373f765e4b32d691a347cd667b110a3802b41190f319bb9680dba" exitCode=0 Nov 25 16:03:39 crc kubenswrapper[4879]: I1125 16:03:39.440875 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" event={"ID":"3e57cd23-6502-4059-8d9f-99707a23b4b4","Type":"ContainerDied","Data":"0705b3b9219373f765e4b32d691a347cd667b110a3802b41190f319bb9680dba"} Nov 25 16:03:39 crc kubenswrapper[4879]: I1125 16:03:39.441397 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" event={"ID":"3e57cd23-6502-4059-8d9f-99707a23b4b4","Type":"ContainerStarted","Data":"3ed8d79759cd26963d212c04a64490229e0c9c25ac84a1066e3082065d67cd16"} Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.462416 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" event={"ID":"3e57cd23-6502-4059-8d9f-99707a23b4b4","Type":"ContainerStarted","Data":"b1627d80034bbc1d330225cc9d41ddf2f84ffa8d9df578ffc4747c90f67130cc"} Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.463145 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.497363 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" podStartSLOduration=3.497346035 podStartE2EDuration="3.497346035s" podCreationTimestamp="2025-11-25 16:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:40.487033518 +0000 UTC m=+5912.090446579" watchObservedRunningTime="2025-11-25 16:03:40.497346035 +0000 UTC m=+5912.100759096" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.663572 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.665612 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.693045 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.747248 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.747474 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.747505 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qft27\" (UniqueName: \"kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.849027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.849190 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.849230 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qft27\" (UniqueName: \"kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.849777 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.850322 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.872463 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qft27\" (UniqueName: \"kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27\") pod \"redhat-marketplace-dsnn4\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:40 crc kubenswrapper[4879]: I1125 16:03:40.988458 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:41 crc kubenswrapper[4879]: I1125 16:03:41.485501 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:42 crc kubenswrapper[4879]: I1125 16:03:42.486016 4879 generic.go:334] "Generic (PLEG): container finished" podID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerID="d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2" exitCode=0 Nov 25 16:03:42 crc kubenswrapper[4879]: I1125 16:03:42.486100 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerDied","Data":"d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2"} Nov 25 16:03:42 crc kubenswrapper[4879]: I1125 16:03:42.486386 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerStarted","Data":"9513487cfd35098afdb2570bb1321236461d0ab2ca4553ce24b985c5fff35aca"} Nov 25 16:03:43 crc kubenswrapper[4879]: I1125 16:03:43.577272 4879 scope.go:117] "RemoveContainer" containerID="706f81d34201040112f6f101080b7d5ff912b4fe675930e89f303d9fa1e58895" Nov 25 16:03:44 crc kubenswrapper[4879]: I1125 16:03:44.504341 4879 generic.go:334] "Generic (PLEG): container finished" podID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerID="1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1" exitCode=0 Nov 25 16:03:44 crc kubenswrapper[4879]: I1125 16:03:44.504436 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerDied","Data":"1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1"} Nov 25 16:03:45 crc kubenswrapper[4879]: I1125 16:03:45.513839 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerStarted","Data":"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389"} Nov 25 16:03:45 crc kubenswrapper[4879]: I1125 16:03:45.542058 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dsnn4" podStartSLOduration=2.95348362 podStartE2EDuration="5.542027677s" podCreationTimestamp="2025-11-25 16:03:40 +0000 UTC" firstStartedPulling="2025-11-25 16:03:42.492149526 +0000 UTC m=+5914.095562597" lastFinishedPulling="2025-11-25 16:03:45.080693583 +0000 UTC m=+5916.684106654" observedRunningTime="2025-11-25 16:03:45.536774147 +0000 UTC m=+5917.140187218" watchObservedRunningTime="2025-11-25 16:03:45.542027677 +0000 UTC m=+5917.145440748" Nov 25 16:03:47 crc kubenswrapper[4879]: I1125 16:03:47.941315 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.009659 4879 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.009889 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="dnsmasq-dns" containerID="cri-o://f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5" gracePeriod=10 Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.511454 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.548504 4879 generic.go:334] "Generic (PLEG): container finished" podID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerID="f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5" exitCode=0 Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.548549 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" event={"ID":"7c70edcb-70c0-4074-8e4b-45e6b0e3b171","Type":"ContainerDied","Data":"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5"} Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.548574 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" event={"ID":"7c70edcb-70c0-4074-8e4b-45e6b0e3b171","Type":"ContainerDied","Data":"7729cfb48031c5cea4411fca13dc239addb36cbd1a4af0e4ac43a59272d55b43"} Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.548591 4879 scope.go:117] "RemoveContainer" containerID="f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.548721 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84c5b75c-qw96h" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.570674 4879 scope.go:117] "RemoveContainer" containerID="6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.606666 4879 scope.go:117] "RemoveContainer" containerID="f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5" Nov 25 16:03:48 crc kubenswrapper[4879]: E1125 16:03:48.607185 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5\": container with ID starting with f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5 not found: ID does not exist" containerID="f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.607216 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5"} err="failed to get container status \"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5\": rpc error: code = NotFound desc = could not find container \"f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5\": container with ID starting with f902a3cea3564b6566b808c3c55ef9e0cf0afecbb374525d3c39921b202bb5e5 not found: ID does not exist" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.607236 4879 scope.go:117] "RemoveContainer" containerID="6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1" Nov 25 16:03:48 crc kubenswrapper[4879]: E1125 16:03:48.607547 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1\": container with ID starting with 6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1 not found: ID does not exist" containerID="6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.607572 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1"} err="failed to get container status \"6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1\": rpc error: code = NotFound desc = could not find container \"6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1\": container with ID starting with 6752f46e3aedf9d04d97c22d9dad6e100a21f51f0e865b71e72ab55b0dad4cc1 not found: ID does not exist" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.623957 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config\") pod \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.624094 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb\") pod \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.624243 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc\") pod \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.624269 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tv2g\" (UniqueName: \"kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g\") pod \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.624326 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb\") pod \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\" (UID: \"7c70edcb-70c0-4074-8e4b-45e6b0e3b171\") " Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.631027 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g" (OuterVolumeSpecName: "kube-api-access-4tv2g") pod "7c70edcb-70c0-4074-8e4b-45e6b0e3b171" (UID: "7c70edcb-70c0-4074-8e4b-45e6b0e3b171"). InnerVolumeSpecName "kube-api-access-4tv2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.646272 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:03:48 crc kubenswrapper[4879]: E1125 16:03:48.646787 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.673579 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7c70edcb-70c0-4074-8e4b-45e6b0e3b171" (UID: "7c70edcb-70c0-4074-8e4b-45e6b0e3b171"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.679330 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config" (OuterVolumeSpecName: "config") pod "7c70edcb-70c0-4074-8e4b-45e6b0e3b171" (UID: "7c70edcb-70c0-4074-8e4b-45e6b0e3b171"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.686940 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7c70edcb-70c0-4074-8e4b-45e6b0e3b171" (UID: "7c70edcb-70c0-4074-8e4b-45e6b0e3b171"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.700470 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7c70edcb-70c0-4074-8e4b-45e6b0e3b171" (UID: "7c70edcb-70c0-4074-8e4b-45e6b0e3b171"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.726949 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.726992 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tv2g\" (UniqueName: \"kubernetes.io/projected/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-kube-api-access-4tv2g\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.727009 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.727021 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.727033 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c70edcb-70c0-4074-8e4b-45e6b0e3b171-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.903241 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:03:48 crc kubenswrapper[4879]: I1125 16:03:48.910875 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84c5b75c-qw96h"] Nov 25 16:03:49 crc kubenswrapper[4879]: I1125 16:03:49.656414 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" path="/var/lib/kubelet/pods/7c70edcb-70c0-4074-8e4b-45e6b0e3b171/volumes" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.438898 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-7vdtq"] Nov 25 16:03:50 crc kubenswrapper[4879]: E1125 16:03:50.439332 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="dnsmasq-dns" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.439349 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="dnsmasq-dns" Nov 25 16:03:50 crc kubenswrapper[4879]: E1125 16:03:50.439385 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="init" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.439393 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="init" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.439614 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c70edcb-70c0-4074-8e4b-45e6b0e3b171" containerName="dnsmasq-dns" Nov 25 16:03:50 crc kubenswrapper[4879]: 
I1125 16:03:50.440265 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.464221 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-7vdtq"] Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.537070 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-acee-account-create-nrwzm"] Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.539085 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.544560 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.551298 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-acee-account-create-nrwzm"] Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.558666 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tjfw\" (UniqueName: \"kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.558955 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.661080 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.661356 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tjfw\" (UniqueName: \"kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.661640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w7sk\" (UniqueName: \"kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.661707 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.662731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.677616 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tjfw\" (UniqueName: \"kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw\") pod \"cinder-db-create-7vdtq\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.762891 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.764452 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.765604 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.766686 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w7sk\" (UniqueName: \"kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.784511 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w7sk\" (UniqueName: \"kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk\") pod \"cinder-acee-account-create-nrwzm\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.858503 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.988971 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:50 crc kubenswrapper[4879]: I1125 16:03:50.989416 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.044798 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.229808 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-7vdtq"] Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.341197 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-acee-account-create-nrwzm"] Nov 25 16:03:51 crc kubenswrapper[4879]: W1125 16:03:51.343190 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf4cd190_d5bf_4367_ad51_61a1e6a873f4.slice/crio-6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4 WatchSource:0}: Error finding container 6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4: Status 404 returned error can't find the container with id 6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4 Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.584579 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acee-account-create-nrwzm" event={"ID":"cf4cd190-d5bf-4367-ad51-61a1e6a873f4","Type":"ContainerStarted","Data":"5500138fde5179c471ebd7478af32d39c41c99d99ef3ef34e8eba40824e9c8fd"} Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.584916 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acee-account-create-nrwzm" event={"ID":"cf4cd190-d5bf-4367-ad51-61a1e6a873f4","Type":"ContainerStarted","Data":"6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4"} Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.588434 4879 generic.go:334] "Generic (PLEG): container finished" podID="db08830b-81f9-4ec0-a944-2ce3d14283ab" containerID="7df753619ee1f46997dc5833356bdbc9731fd5318a77c9fecfa45c047318a824" exitCode=0 Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.588575 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7vdtq" event={"ID":"db08830b-81f9-4ec0-a944-2ce3d14283ab","Type":"ContainerDied","Data":"7df753619ee1f46997dc5833356bdbc9731fd5318a77c9fecfa45c047318a824"} Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.588667 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7vdtq" event={"ID":"db08830b-81f9-4ec0-a944-2ce3d14283ab","Type":"ContainerStarted","Data":"37e60c4befc60e6f28b66bc17413a05093e3c4fbc53a2f54e335b5edbdc87529"} Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.610420 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-acee-account-create-nrwzm" podStartSLOduration=1.6103953450000001 podStartE2EDuration="1.610395345s" podCreationTimestamp="2025-11-25 16:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:51.599730598 +0000 UTC m=+5923.203143669" watchObservedRunningTime="2025-11-25 
16:03:51.610395345 +0000 UTC m=+5923.213808436" Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.633075 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:51 crc kubenswrapper[4879]: I1125 16:03:51.679671 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:52 crc kubenswrapper[4879]: I1125 16:03:52.599292 4879 generic.go:334] "Generic (PLEG): container finished" podID="cf4cd190-d5bf-4367-ad51-61a1e6a873f4" containerID="5500138fde5179c471ebd7478af32d39c41c99d99ef3ef34e8eba40824e9c8fd" exitCode=0 Nov 25 16:03:52 crc kubenswrapper[4879]: I1125 16:03:52.599350 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acee-account-create-nrwzm" event={"ID":"cf4cd190-d5bf-4367-ad51-61a1e6a873f4","Type":"ContainerDied","Data":"5500138fde5179c471ebd7478af32d39c41c99d99ef3ef34e8eba40824e9c8fd"} Nov 25 16:03:52 crc kubenswrapper[4879]: I1125 16:03:52.893807 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.004341 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts\") pod \"db08830b-81f9-4ec0-a944-2ce3d14283ab\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.004406 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tjfw\" (UniqueName: \"kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw\") pod \"db08830b-81f9-4ec0-a944-2ce3d14283ab\" (UID: \"db08830b-81f9-4ec0-a944-2ce3d14283ab\") " Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.004836 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "db08830b-81f9-4ec0-a944-2ce3d14283ab" (UID: "db08830b-81f9-4ec0-a944-2ce3d14283ab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.009013 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw" (OuterVolumeSpecName: "kube-api-access-8tjfw") pod "db08830b-81f9-4ec0-a944-2ce3d14283ab" (UID: "db08830b-81f9-4ec0-a944-2ce3d14283ab"). InnerVolumeSpecName "kube-api-access-8tjfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.106168 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db08830b-81f9-4ec0-a944-2ce3d14283ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.106223 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tjfw\" (UniqueName: \"kubernetes.io/projected/db08830b-81f9-4ec0-a944-2ce3d14283ab-kube-api-access-8tjfw\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.609056 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7vdtq" event={"ID":"db08830b-81f9-4ec0-a944-2ce3d14283ab","Type":"ContainerDied","Data":"37e60c4befc60e6f28b66bc17413a05093e3c4fbc53a2f54e335b5edbdc87529"} Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.609421 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37e60c4befc60e6f28b66bc17413a05093e3c4fbc53a2f54e335b5edbdc87529" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.609254 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dsnn4" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="registry-server" containerID="cri-o://71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389" gracePeriod=2 Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.609252 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-7vdtq" Nov 25 16:03:53 crc kubenswrapper[4879]: I1125 16:03:53.977629 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.020586 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w7sk\" (UniqueName: \"kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk\") pod \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.021946 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts\") pod \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\" (UID: \"cf4cd190-d5bf-4367-ad51-61a1e6a873f4\") " Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.023644 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cf4cd190-d5bf-4367-ad51-61a1e6a873f4" (UID: "cf4cd190-d5bf-4367-ad51-61a1e6a873f4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.025741 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk" (OuterVolumeSpecName: "kube-api-access-6w7sk") pod "cf4cd190-d5bf-4367-ad51-61a1e6a873f4" (UID: "cf4cd190-d5bf-4367-ad51-61a1e6a873f4"). InnerVolumeSpecName "kube-api-access-6w7sk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.109342 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.125022 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w7sk\" (UniqueName: \"kubernetes.io/projected/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-kube-api-access-6w7sk\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.125071 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf4cd190-d5bf-4367-ad51-61a1e6a873f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.226661 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content\") pod \"86c033a7-03f5-4309-a06d-61cdd44f3485\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.226824 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities\") pod \"86c033a7-03f5-4309-a06d-61cdd44f3485\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.226852 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qft27\" (UniqueName: \"kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27\") pod \"86c033a7-03f5-4309-a06d-61cdd44f3485\" (UID: \"86c033a7-03f5-4309-a06d-61cdd44f3485\") " Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.227762 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities" (OuterVolumeSpecName: "utilities") pod "86c033a7-03f5-4309-a06d-61cdd44f3485" (UID: "86c033a7-03f5-4309-a06d-61cdd44f3485"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.230933 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27" (OuterVolumeSpecName: "kube-api-access-qft27") pod "86c033a7-03f5-4309-a06d-61cdd44f3485" (UID: "86c033a7-03f5-4309-a06d-61cdd44f3485"). InnerVolumeSpecName "kube-api-access-qft27". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.244490 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86c033a7-03f5-4309-a06d-61cdd44f3485" (UID: "86c033a7-03f5-4309-a06d-61cdd44f3485"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.328497 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.328543 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qft27\" (UniqueName: \"kubernetes.io/projected/86c033a7-03f5-4309-a06d-61cdd44f3485-kube-api-access-qft27\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.328554 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86c033a7-03f5-4309-a06d-61cdd44f3485-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.620699 4879 generic.go:334] "Generic (PLEG): container finished" podID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerID="71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389" exitCode=0 Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.620761 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerDied","Data":"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389"} Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.620773 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dsnn4" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.620786 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dsnn4" event={"ID":"86c033a7-03f5-4309-a06d-61cdd44f3485","Type":"ContainerDied","Data":"9513487cfd35098afdb2570bb1321236461d0ab2ca4553ce24b985c5fff35aca"} Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.620815 4879 scope.go:117] "RemoveContainer" containerID="71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.623354 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acee-account-create-nrwzm" event={"ID":"cf4cd190-d5bf-4367-ad51-61a1e6a873f4","Type":"ContainerDied","Data":"6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4"} Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.623535 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6162fe57eb89bc19dd5b92df2ff87c77ef6549a3522eefffdcbc57af03bc17a4" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.623696 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-acee-account-create-nrwzm" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.652349 4879 scope.go:117] "RemoveContainer" containerID="1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.664323 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.673985 4879 scope.go:117] "RemoveContainer" containerID="d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.677145 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dsnn4"] Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.690932 4879 scope.go:117] "RemoveContainer" containerID="71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389" Nov 25 16:03:54 crc kubenswrapper[4879]: E1125 16:03:54.691510 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389\": container with ID starting with 71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389 not found: ID does not exist" containerID="71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.691547 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389"} err="failed to get container status \"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389\": rpc error: code = NotFound desc = could not find container \"71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389\": container with ID starting with 71f93ebadea37ef157aa19639f7f970f75909aa0c317dad8e9c4b45fbced1389 not found: ID does not exist" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.691573 4879 scope.go:117] "RemoveContainer" containerID="1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1" Nov 25 16:03:54 crc kubenswrapper[4879]: E1125 16:03:54.691922 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1\": container with ID starting with 1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1 not found: ID does not exist" containerID="1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.691970 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1"} err="failed to get container status \"1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1\": rpc error: code = NotFound desc = could not find container \"1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1\": container with ID starting with 1aaee7bc59544b4061706d40cb58c5ac1f5c93f5ed86b9ffeafbc2ddef903df1 not found: ID does not exist" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.691993 4879 scope.go:117] "RemoveContainer" containerID="d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2" Nov 25 16:03:54 crc kubenswrapper[4879]: E1125 16:03:54.692373 4879 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2\": container with ID starting with d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2 not found: ID does not exist" containerID="d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2" Nov 25 16:03:54 crc kubenswrapper[4879]: I1125 16:03:54.692396 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2"} err="failed to get container status \"d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2\": rpc error: code = NotFound desc = could not find container \"d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2\": container with ID starting with d3ee21fc9267a8da783c849802794ceb40a8e460869d3b1c73a4a795bb61bac2 not found: ID does not exist" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.658221 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" path="/var/lib/kubelet/pods/86c033a7-03f5-4309-a06d-61cdd44f3485/volumes" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705139 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-hxldl"] Nov 25 16:03:55 crc kubenswrapper[4879]: E1125 16:03:55.705682 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db08830b-81f9-4ec0-a944-2ce3d14283ab" containerName="mariadb-database-create" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705708 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="db08830b-81f9-4ec0-a944-2ce3d14283ab" containerName="mariadb-database-create" Nov 25 16:03:55 crc kubenswrapper[4879]: E1125 16:03:55.705729 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="extract-utilities" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705738 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="extract-utilities" Nov 25 16:03:55 crc kubenswrapper[4879]: E1125 16:03:55.705762 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4cd190-d5bf-4367-ad51-61a1e6a873f4" containerName="mariadb-account-create" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705770 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4cd190-d5bf-4367-ad51-61a1e6a873f4" containerName="mariadb-account-create" Nov 25 16:03:55 crc kubenswrapper[4879]: E1125 16:03:55.705791 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="registry-server" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705800 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="registry-server" Nov 25 16:03:55 crc kubenswrapper[4879]: E1125 16:03:55.705812 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="extract-content" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.705819 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" containerName="extract-content" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.706036 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="86c033a7-03f5-4309-a06d-61cdd44f3485" 
containerName="registry-server" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.706062 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="db08830b-81f9-4ec0-a944-2ce3d14283ab" containerName="mariadb-database-create" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.706087 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4cd190-d5bf-4367-ad51-61a1e6a873f4" containerName="mariadb-account-create" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.706873 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.709033 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qfprh" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.709081 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.710767 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.714646 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hxldl"] Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750009 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750298 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvz5v\" (UniqueName: \"kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750368 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750531 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750602 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.750667 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data\") pod \"cinder-db-sync-hxldl\" (UID: 
\"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.851852 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.851977 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvz5v\" (UniqueName: \"kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.852014 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.852063 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.852092 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.852136 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.852248 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.856427 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.857609 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.858074 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.858165 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:55 crc kubenswrapper[4879]: I1125 16:03:55.871863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvz5v\" (UniqueName: \"kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v\") pod \"cinder-db-sync-hxldl\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:56 crc kubenswrapper[4879]: I1125 16:03:56.024382 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxldl" Nov 25 16:03:56 crc kubenswrapper[4879]: I1125 16:03:56.471818 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hxldl"] Nov 25 16:03:56 crc kubenswrapper[4879]: I1125 16:03:56.646047 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxldl" event={"ID":"b459cfc1-d98a-40f9-b99e-8a9c71cadf08","Type":"ContainerStarted","Data":"783842580a85f2d43ceb14c5373049b3cca0acca0678fbdf1f996c19c2ed2755"} Nov 25 16:03:57 crc kubenswrapper[4879]: I1125 16:03:57.663331 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxldl" event={"ID":"b459cfc1-d98a-40f9-b99e-8a9c71cadf08","Type":"ContainerStarted","Data":"a6c1239f0ad03c18ac75cef116e46f0dfe3ef469ca43285b89867b532bc3a01a"} Nov 25 16:03:57 crc kubenswrapper[4879]: I1125 16:03:57.693364 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-hxldl" podStartSLOduration=2.693341176 podStartE2EDuration="2.693341176s" podCreationTimestamp="2025-11-25 16:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:03:57.677475859 +0000 UTC m=+5929.280888930" watchObservedRunningTime="2025-11-25 16:03:57.693341176 +0000 UTC m=+5929.296754247" Nov 25 16:03:59 crc kubenswrapper[4879]: I1125 16:03:59.652268 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:03:59 crc kubenswrapper[4879]: E1125 16:03:59.653256 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:04:02 crc kubenswrapper[4879]: I1125 16:04:02.709831 4879 generic.go:334] "Generic (PLEG): container finished" podID="b459cfc1-d98a-40f9-b99e-8a9c71cadf08" containerID="a6c1239f0ad03c18ac75cef116e46f0dfe3ef469ca43285b89867b532bc3a01a" exitCode=0 Nov 25 16:04:02 crc kubenswrapper[4879]: I1125 16:04:02.709958 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxldl" 
event={"ID":"b459cfc1-d98a-40f9-b99e-8a9c71cadf08","Type":"ContainerDied","Data":"a6c1239f0ad03c18ac75cef116e46f0dfe3ef469ca43285b89867b532bc3a01a"} Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.062966 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxldl" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.102905 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103005 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103167 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103271 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103351 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvz5v\" (UniqueName: \"kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id\") pod \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\" (UID: \"b459cfc1-d98a-40f9-b99e-8a9c71cadf08\") " Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.103628 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.104394 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.108968 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts" (OuterVolumeSpecName: "scripts") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.109114 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.109110 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v" (OuterVolumeSpecName: "kube-api-access-nvz5v") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "kube-api-access-nvz5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.129539 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.158805 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data" (OuterVolumeSpecName: "config-data") pod "b459cfc1-d98a-40f9-b99e-8a9c71cadf08" (UID: "b459cfc1-d98a-40f9-b99e-8a9c71cadf08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.206335 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.206391 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.206405 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.206416 4879 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.206426 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvz5v\" (UniqueName: \"kubernetes.io/projected/b459cfc1-d98a-40f9-b99e-8a9c71cadf08-kube-api-access-nvz5v\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.728112 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxldl" event={"ID":"b459cfc1-d98a-40f9-b99e-8a9c71cadf08","Type":"ContainerDied","Data":"783842580a85f2d43ceb14c5373049b3cca0acca0678fbdf1f996c19c2ed2755"} Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.728172 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="783842580a85f2d43ceb14c5373049b3cca0acca0678fbdf1f996c19c2ed2755" Nov 25 16:04:04 crc kubenswrapper[4879]: I1125 16:04:04.728209 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxldl" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.182733 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:04:05 crc kubenswrapper[4879]: E1125 16:04:05.183478 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b459cfc1-d98a-40f9-b99e-8a9c71cadf08" containerName="cinder-db-sync" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.183495 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b459cfc1-d98a-40f9-b99e-8a9c71cadf08" containerName="cinder-db-sync" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.183695 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b459cfc1-d98a-40f9-b99e-8a9c71cadf08" containerName="cinder-db-sync" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.184658 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.204267 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.324148 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw8bv\" (UniqueName: \"kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.324199 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.324449 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.324507 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.324551 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.335562 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.337499 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.340330 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qfprh" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.340480 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.340628 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.340683 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.353959 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.426175 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw8bv\" (UniqueName: \"kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.426219 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.426289 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.426306 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.426326 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.427287 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.427311 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 
16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.427287 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.427352 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.446165 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw8bv\" (UniqueName: \"kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv\") pod \"dnsmasq-dns-86f775599f-txqnp\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.506338 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528212 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528253 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528297 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528423 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvzws\" (UniqueName: \"kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528481 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528571 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 
25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.528629 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.630586 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631084 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631138 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631228 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvzws\" (UniqueName: \"kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631275 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631293 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.631312 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.632310 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.632946 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc 
kubenswrapper[4879]: I1125 16:04:05.637870 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.641934 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.643879 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.651707 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.652277 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvzws\" (UniqueName: \"kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws\") pod \"cinder-api-0\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " pod="openstack/cinder-api-0" Nov 25 16:04:05 crc kubenswrapper[4879]: I1125 16:04:05.655335 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.015128 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.217913 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.763901 4879 generic.go:334] "Generic (PLEG): container finished" podID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerID="6642773a85fd1e63d7cbc8b00f01e2da008eebae79a871f85ba7190eed242ee9" exitCode=0 Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.763972 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f775599f-txqnp" event={"ID":"3a724b1f-c257-4848-b4c5-f63cea4928d9","Type":"ContainerDied","Data":"6642773a85fd1e63d7cbc8b00f01e2da008eebae79a871f85ba7190eed242ee9"} Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.764003 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f775599f-txqnp" event={"ID":"3a724b1f-c257-4848-b4c5-f63cea4928d9","Type":"ContainerStarted","Data":"52fa05bd2264ebdfb9a583236c981fae9e6103bb240b7fbfcee82b0b6a8de6ad"} Nov 25 16:04:06 crc kubenswrapper[4879]: I1125 16:04:06.766494 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerStarted","Data":"a4f5690c3ce4f30de559ede8a824871d09f096162f874e75df6b2517eeee1e97"} Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.780053 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerStarted","Data":"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95"} Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.780909 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.780929 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerStarted","Data":"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d"} Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.783477 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f775599f-txqnp" event={"ID":"3a724b1f-c257-4848-b4c5-f63cea4928d9","Type":"ContainerStarted","Data":"153887253f1dd05ebdb9f88e7098c11b8629d69c0f0e8bb6429cdc354a1b056e"} Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.783673 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.807829 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.807809104 podStartE2EDuration="2.807809104s" podCreationTimestamp="2025-11-25 16:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:07.801865725 +0000 UTC m=+5939.405278796" watchObservedRunningTime="2025-11-25 16:04:07.807809104 +0000 UTC m=+5939.411222185" Nov 25 16:04:07 crc kubenswrapper[4879]: I1125 16:04:07.823736 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86f775599f-txqnp" 
podStartSLOduration=2.823713612 podStartE2EDuration="2.823713612s" podCreationTimestamp="2025-11-25 16:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:07.822394777 +0000 UTC m=+5939.425807848" watchObservedRunningTime="2025-11-25 16:04:07.823713612 +0000 UTC m=+5939.427126683" Nov 25 16:04:10 crc kubenswrapper[4879]: I1125 16:04:10.644618 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:04:10 crc kubenswrapper[4879]: E1125 16:04:10.645514 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:04:15 crc kubenswrapper[4879]: I1125 16:04:15.508370 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:04:15 crc kubenswrapper[4879]: I1125 16:04:15.568241 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:04:15 crc kubenswrapper[4879]: I1125 16:04:15.569076 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="dnsmasq-dns" containerID="cri-o://b1627d80034bbc1d330225cc9d41ddf2f84ffa8d9df578ffc4747c90f67130cc" gracePeriod=10 Nov 25 16:04:15 crc kubenswrapper[4879]: I1125 16:04:15.870204 4879 generic.go:334] "Generic (PLEG): container finished" podID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerID="b1627d80034bbc1d330225cc9d41ddf2f84ffa8d9df578ffc4747c90f67130cc" exitCode=0 Nov 25 16:04:15 crc kubenswrapper[4879]: I1125 16:04:15.870256 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" event={"ID":"3e57cd23-6502-4059-8d9f-99707a23b4b4","Type":"ContainerDied","Data":"b1627d80034bbc1d330225cc9d41ddf2f84ffa8d9df578ffc4747c90f67130cc"} Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.121690 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.217304 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb\") pod \"3e57cd23-6502-4059-8d9f-99707a23b4b4\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.217433 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6h4n\" (UniqueName: \"kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n\") pod \"3e57cd23-6502-4059-8d9f-99707a23b4b4\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.217480 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc\") pod \"3e57cd23-6502-4059-8d9f-99707a23b4b4\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.217508 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb\") pod \"3e57cd23-6502-4059-8d9f-99707a23b4b4\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.217528 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config\") pod \"3e57cd23-6502-4059-8d9f-99707a23b4b4\" (UID: \"3e57cd23-6502-4059-8d9f-99707a23b4b4\") " Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.247095 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n" (OuterVolumeSpecName: "kube-api-access-s6h4n") pod "3e57cd23-6502-4059-8d9f-99707a23b4b4" (UID: "3e57cd23-6502-4059-8d9f-99707a23b4b4"). InnerVolumeSpecName "kube-api-access-s6h4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.276418 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e57cd23-6502-4059-8d9f-99707a23b4b4" (UID: "3e57cd23-6502-4059-8d9f-99707a23b4b4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.282233 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e57cd23-6502-4059-8d9f-99707a23b4b4" (UID: "3e57cd23-6502-4059-8d9f-99707a23b4b4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.296636 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e57cd23-6502-4059-8d9f-99707a23b4b4" (UID: "3e57cd23-6502-4059-8d9f-99707a23b4b4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.297354 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config" (OuterVolumeSpecName: "config") pod "3e57cd23-6502-4059-8d9f-99707a23b4b4" (UID: "3e57cd23-6502-4059-8d9f-99707a23b4b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.320306 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.320349 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6h4n\" (UniqueName: \"kubernetes.io/projected/3e57cd23-6502-4059-8d9f-99707a23b4b4-kube-api-access-s6h4n\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.320362 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.320373 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.320384 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e57cd23-6502-4059-8d9f-99707a23b4b4-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.844299 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.844558 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerName="nova-cell0-conductor-conductor" containerID="cri-o://b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.856277 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.856573 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-log" containerID="cri-o://412dcc7cbf510b3ae43bbac4a484c394bdc03e83273b5bbd5f095b3569b7cf4b" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.857242 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-api" containerID="cri-o://c8d00db643bb70b03b1122054d30fd3cf5e95f9800ef4c151258068b1a56f710" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.896744 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.897113 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.897072 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67c7c5f9c9-ktnmr" event={"ID":"3e57cd23-6502-4059-8d9f-99707a23b4b4","Type":"ContainerDied","Data":"3ed8d79759cd26963d212c04a64490229e0c9c25ac84a1066e3082065d67cd16"} Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.897372 4879 scope.go:117] "RemoveContainer" containerID="b1627d80034bbc1d330225cc9d41ddf2f84ffa8d9df578ffc4747c90f67130cc" Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.897717 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3b1146ac-90b0-4934-8280-626f8842aa5a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.905042 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.905266 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4223a71e-c436-4808-b86a-971246d6ef92" containerName="nova-scheduler-scheduler" containerID="cri-o://5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.946221 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.946666 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-log" containerID="cri-o://eee429f2eb67288e9eabb20211fb4fd9768f7f5c6a556a75f790c7a5f3fd88e3" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.946921 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-metadata" containerID="cri-o://52ebfc79e22cbc7a62153d5017b54e7dfef08229c54c4e6444748694c712a053" gracePeriod=30 Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.987720 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:04:16 crc kubenswrapper[4879]: I1125 16:04:16.990081 4879 scope.go:117] "RemoveContainer" containerID="0705b3b9219373f765e4b32d691a347cd667b110a3802b41190f319bb9680dba" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.045023 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67c7c5f9c9-ktnmr"] Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.662658 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" path="/var/lib/kubelet/pods/3e57cd23-6502-4059-8d9f-99707a23b4b4/volumes" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.718831 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.900473 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.932576 4879 generic.go:334] "Generic (PLEG): container finished" podID="3b1146ac-90b0-4934-8280-626f8842aa5a" containerID="c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515" exitCode=0 Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.932631 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.932644 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3b1146ac-90b0-4934-8280-626f8842aa5a","Type":"ContainerDied","Data":"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515"} Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.933515 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3b1146ac-90b0-4934-8280-626f8842aa5a","Type":"ContainerDied","Data":"a9ac65e09ec44fa52acf83a27558d04b1146002dc5eb478ba737aa7a93ff6117"} Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.933533 4879 scope.go:117] "RemoveContainer" containerID="c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.942833 4879 generic.go:334] "Generic (PLEG): container finished" podID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerID="eee429f2eb67288e9eabb20211fb4fd9768f7f5c6a556a75f790c7a5f3fd88e3" exitCode=143 Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.942909 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerDied","Data":"eee429f2eb67288e9eabb20211fb4fd9768f7f5c6a556a75f790c7a5f3fd88e3"} Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.950035 4879 generic.go:334] "Generic (PLEG): container finished" podID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerID="412dcc7cbf510b3ae43bbac4a484c394bdc03e83273b5bbd5f095b3569b7cf4b" exitCode=143 Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.950260 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerDied","Data":"412dcc7cbf510b3ae43bbac4a484c394bdc03e83273b5bbd5f095b3569b7cf4b"} Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.979976 4879 scope.go:117] "RemoveContainer" containerID="c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515" Nov 25 16:04:17 crc kubenswrapper[4879]: E1125 16:04:17.980466 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515\": container with ID starting with c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515 not found: ID does not exist" containerID="c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515" Nov 25 16:04:17 crc kubenswrapper[4879]: I1125 16:04:17.980515 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515"} err="failed to get container status \"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515\": rpc error: code = NotFound desc = could not find container \"c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515\": container with ID starting with 
c781518242bacc2e33801d492eeb9711b8504da7b53d0460b6ba6a1ecda6e515 not found: ID does not exist" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.056135 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data\") pod \"3b1146ac-90b0-4934-8280-626f8842aa5a\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.056346 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp8f2\" (UniqueName: \"kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2\") pod \"3b1146ac-90b0-4934-8280-626f8842aa5a\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.056435 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle\") pod \"3b1146ac-90b0-4934-8280-626f8842aa5a\" (UID: \"3b1146ac-90b0-4934-8280-626f8842aa5a\") " Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.063438 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2" (OuterVolumeSpecName: "kube-api-access-sp8f2") pod "3b1146ac-90b0-4934-8280-626f8842aa5a" (UID: "3b1146ac-90b0-4934-8280-626f8842aa5a"). InnerVolumeSpecName "kube-api-access-sp8f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.082400 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data" (OuterVolumeSpecName: "config-data") pod "3b1146ac-90b0-4934-8280-626f8842aa5a" (UID: "3b1146ac-90b0-4934-8280-626f8842aa5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.087491 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b1146ac-90b0-4934-8280-626f8842aa5a" (UID: "3b1146ac-90b0-4934-8280-626f8842aa5a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.158612 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.158644 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp8f2\" (UniqueName: \"kubernetes.io/projected/3b1146ac-90b0-4934-8280-626f8842aa5a-kube-api-access-sp8f2\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.158654 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1146ac-90b0-4934-8280-626f8842aa5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.295106 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.316274 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.329307 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:18 crc kubenswrapper[4879]: E1125 16:04:18.329922 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1146ac-90b0-4934-8280-626f8842aa5a" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.329991 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1146ac-90b0-4934-8280-626f8842aa5a" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 16:04:18 crc kubenswrapper[4879]: E1125 16:04:18.330097 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="init" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.330178 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="init" Nov 25 16:04:18 crc kubenswrapper[4879]: E1125 16:04:18.330262 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="dnsmasq-dns" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.330318 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="dnsmasq-dns" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.330552 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e57cd23-6502-4059-8d9f-99707a23b4b4" containerName="dnsmasq-dns" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.330638 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b1146ac-90b0-4934-8280-626f8842aa5a" containerName="nova-cell1-novncproxy-novncproxy" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.331294 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.333582 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.347826 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.484875 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtk25\" (UniqueName: \"kubernetes.io/projected/4d5221af-ab36-4a01-8c73-219f8bb76568-kube-api-access-xtk25\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.484933 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.484962 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.586546 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtk25\" (UniqueName: \"kubernetes.io/projected/4d5221af-ab36-4a01-8c73-219f8bb76568-kube-api-access-xtk25\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.586623 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.586661 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.590213 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.590753 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d5221af-ab36-4a01-8c73-219f8bb76568-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.616753 
4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtk25\" (UniqueName: \"kubernetes.io/projected/4d5221af-ab36-4a01-8c73-219f8bb76568-kube-api-access-xtk25\") pod \"nova-cell1-novncproxy-0\" (UID: \"4d5221af-ab36-4a01-8c73-219f8bb76568\") " pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:18 crc kubenswrapper[4879]: I1125 16:04:18.650619 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:19 crc kubenswrapper[4879]: I1125 16:04:19.091452 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 25 16:04:19 crc kubenswrapper[4879]: W1125 16:04:19.106713 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d5221af_ab36_4a01_8c73_219f8bb76568.slice/crio-04d5867e0fa88a967f09d0c924a14cfee5a2e51279a8fd7b39a053451a425bd1 WatchSource:0}: Error finding container 04d5867e0fa88a967f09d0c924a14cfee5a2e51279a8fd7b39a053451a425bd1: Status 404 returned error can't find the container with id 04d5867e0fa88a967f09d0c924a14cfee5a2e51279a8fd7b39a053451a425bd1 Nov 25 16:04:19 crc kubenswrapper[4879]: E1125 16:04:19.139951 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:04:19 crc kubenswrapper[4879]: E1125 16:04:19.141208 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:04:19 crc kubenswrapper[4879]: E1125 16:04:19.144112 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:04:19 crc kubenswrapper[4879]: E1125 16:04:19.144210 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerName="nova-cell0-conductor-conductor" Nov 25 16:04:19 crc kubenswrapper[4879]: I1125 16:04:19.655615 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b1146ac-90b0-4934-8280-626f8842aa5a" path="/var/lib/kubelet/pods/3b1146ac-90b0-4934-8280-626f8842aa5a/volumes" Nov 25 16:04:19 crc kubenswrapper[4879]: I1125 16:04:19.975321 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4d5221af-ab36-4a01-8c73-219f8bb76568","Type":"ContainerStarted","Data":"11af02abbc8a4bc5bf789e878138c32e4c119637cc1ea557b06c7c9509d38edf"} Nov 25 16:04:19 crc kubenswrapper[4879]: I1125 16:04:19.975366 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"4d5221af-ab36-4a01-8c73-219f8bb76568","Type":"ContainerStarted","Data":"04d5867e0fa88a967f09d0c924a14cfee5a2e51279a8fd7b39a053451a425bd1"} Nov 25 16:04:19 crc kubenswrapper[4879]: I1125 16:04:19.991383 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.9913677330000001 podStartE2EDuration="1.991367733s" podCreationTimestamp="2025-11-25 16:04:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:19.988010122 +0000 UTC m=+5951.591423193" watchObservedRunningTime="2025-11-25 16:04:19.991367733 +0000 UTC m=+5951.594780804" Nov 25 16:04:20 crc kubenswrapper[4879]: I1125 16:04:20.467441 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:20 crc kubenswrapper[4879]: I1125 16:04:20.470721 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" containerName="nova-cell1-conductor-conductor" containerID="cri-o://666b9994e53fb06538c80a83678bf387cebfa43ef1b6ae158ff69ade37c21dcb" gracePeriod=30 Nov 25 16:04:20 crc kubenswrapper[4879]: I1125 16:04:20.998346 4879 generic.go:334] "Generic (PLEG): container finished" podID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerID="52ebfc79e22cbc7a62153d5017b54e7dfef08229c54c4e6444748694c712a053" exitCode=0 Nov 25 16:04:20 crc kubenswrapper[4879]: I1125 16:04:20.998428 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerDied","Data":"52ebfc79e22cbc7a62153d5017b54e7dfef08229c54c4e6444748694c712a053"} Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.005250 4879 generic.go:334] "Generic (PLEG): container finished" podID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerID="c8d00db643bb70b03b1122054d30fd3cf5e95f9800ef4c151258068b1a56f710" exitCode=0 Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.006375 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerDied","Data":"c8d00db643bb70b03b1122054d30fd3cf5e95f9800ef4c151258068b1a56f710"} Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.006412 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c049fa46-6a96-405d-9cf7-a86978fdd705","Type":"ContainerDied","Data":"ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec"} Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.006427 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed2d0720413274061a31e11ebfe448eb8b897b7720434221c09bb7aa8b5760ec" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.015829 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.147473 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87ckk\" (UniqueName: \"kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk\") pod \"c049fa46-6a96-405d-9cf7-a86978fdd705\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.147781 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data\") pod \"c049fa46-6a96-405d-9cf7-a86978fdd705\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.148097 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle\") pod \"c049fa46-6a96-405d-9cf7-a86978fdd705\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.148220 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs\") pod \"c049fa46-6a96-405d-9cf7-a86978fdd705\" (UID: \"c049fa46-6a96-405d-9cf7-a86978fdd705\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.150663 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs" (OuterVolumeSpecName: "logs") pod "c049fa46-6a96-405d-9cf7-a86978fdd705" (UID: "c049fa46-6a96-405d-9cf7-a86978fdd705"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.156873 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk" (OuterVolumeSpecName: "kube-api-access-87ckk") pod "c049fa46-6a96-405d-9cf7-a86978fdd705" (UID: "c049fa46-6a96-405d-9cf7-a86978fdd705"). InnerVolumeSpecName "kube-api-access-87ckk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.210282 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data" (OuterVolumeSpecName: "config-data") pod "c049fa46-6a96-405d-9cf7-a86978fdd705" (UID: "c049fa46-6a96-405d-9cf7-a86978fdd705"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.233771 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c049fa46-6a96-405d-9cf7-a86978fdd705" (UID: "c049fa46-6a96-405d-9cf7-a86978fdd705"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.250865 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.250909 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c049fa46-6a96-405d-9cf7-a86978fdd705-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.250919 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87ckk\" (UniqueName: \"kubernetes.io/projected/c049fa46-6a96-405d-9cf7-a86978fdd705-kube-api-access-87ckk\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.250929 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c049fa46-6a96-405d-9cf7-a86978fdd705-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.263263 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.351612 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs\") pod \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.351712 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v7kc\" (UniqueName: \"kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc\") pod \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.351892 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle\") pod \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.351970 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data\") pod \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\" (UID: \"62c2b162-8b9a-4c6f-a29c-03a03beeb22c\") " Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.352072 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs" (OuterVolumeSpecName: "logs") pod "62c2b162-8b9a-4c6f-a29c-03a03beeb22c" (UID: "62c2b162-8b9a-4c6f-a29c-03a03beeb22c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.352447 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.355452 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc" (OuterVolumeSpecName: "kube-api-access-8v7kc") pod "62c2b162-8b9a-4c6f-a29c-03a03beeb22c" (UID: "62c2b162-8b9a-4c6f-a29c-03a03beeb22c"). InnerVolumeSpecName "kube-api-access-8v7kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.378549 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62c2b162-8b9a-4c6f-a29c-03a03beeb22c" (UID: "62c2b162-8b9a-4c6f-a29c-03a03beeb22c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.385716 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data" (OuterVolumeSpecName: "config-data") pod "62c2b162-8b9a-4c6f-a29c-03a03beeb22c" (UID: "62c2b162-8b9a-4c6f-a29c-03a03beeb22c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.453939 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.453984 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: I1125 16:04:21.454000 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v7kc\" (UniqueName: \"kubernetes.io/projected/62c2b162-8b9a-4c6f-a29c-03a03beeb22c-kube-api-access-8v7kc\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:21 crc kubenswrapper[4879]: E1125 16:04:21.655385 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 16:04:21 crc kubenswrapper[4879]: E1125 16:04:21.657528 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 16:04:21 crc kubenswrapper[4879]: E1125 16:04:21.661429 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 25 16:04:21 crc kubenswrapper[4879]: E1125 16:04:21.661495 4879 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="4223a71e-c436-4808-b86a-971246d6ef92" containerName="nova-scheduler-scheduler" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.100352 4879 generic.go:334] "Generic (PLEG): container finished" podID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" containerID="666b9994e53fb06538c80a83678bf387cebfa43ef1b6ae158ff69ade37c21dcb" exitCode=0 Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.100746 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ced1f22f-b283-43b3-a85a-e591ff1dbb27","Type":"ContainerDied","Data":"666b9994e53fb06538c80a83678bf387cebfa43ef1b6ae158ff69ade37c21dcb"} Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.100778 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ced1f22f-b283-43b3-a85a-e591ff1dbb27","Type":"ContainerDied","Data":"8e35b49d16b21335bca4972a6bc46afcfffef4f114729e1664026ec8c31322c3"} Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.100791 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e35b49d16b21335bca4972a6bc46afcfffef4f114729e1664026ec8c31322c3" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.115709 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.133400 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.134206 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.134240 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"62c2b162-8b9a-4c6f-a29c-03a03beeb22c","Type":"ContainerDied","Data":"1ee586b0f763a09664d8ba89ffa8900aa7456d8d4f319ab52ca0f30b871a0488"} Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.134327 4879 scope.go:117] "RemoveContainer" containerID="52ebfc79e22cbc7a62153d5017b54e7dfef08229c54c4e6444748694c712a053" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.202151 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.210822 4879 scope.go:117] "RemoveContainer" containerID="eee429f2eb67288e9eabb20211fb4fd9768f7f5c6a556a75f790c7a5f3fd88e3" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.233383 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271404 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: E1125 16:04:22.271824 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" containerName="nova-cell1-conductor-conductor" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271835 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" containerName="nova-cell1-conductor-conductor" Nov 25 16:04:22 crc kubenswrapper[4879]: E1125 16:04:22.271868 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-log" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271874 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-log" Nov 25 16:04:22 crc kubenswrapper[4879]: E1125 16:04:22.271888 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-api" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271893 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-api" Nov 25 16:04:22 crc kubenswrapper[4879]: E1125 16:04:22.271902 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-metadata" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271909 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-metadata" Nov 25 16:04:22 crc kubenswrapper[4879]: E1125 16:04:22.271935 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-log" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.271940 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-log" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.272100 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-api" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.272117 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" containerName="nova-api-log" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.272145 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" containerName="nova-cell1-conductor-conductor" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.272157 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-metadata" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.272172 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" containerName="nova-metadata-log" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.273198 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.280630 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.285010 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle\") pod \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.285141 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data\") pod \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.285211 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkdht\" (UniqueName: \"kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht\") pod \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\" (UID: \"ced1f22f-b283-43b3-a85a-e591ff1dbb27\") " Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.289085 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht" (OuterVolumeSpecName: "kube-api-access-tkdht") pod "ced1f22f-b283-43b3-a85a-e591ff1dbb27" (UID: "ced1f22f-b283-43b3-a85a-e591ff1dbb27"). InnerVolumeSpecName "kube-api-access-tkdht". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.294991 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.327843 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.339543 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ced1f22f-b283-43b3-a85a-e591ff1dbb27" (UID: "ced1f22f-b283-43b3-a85a-e591ff1dbb27"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.341061 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data" (OuterVolumeSpecName: "config-data") pod "ced1f22f-b283-43b3-a85a-e591ff1dbb27" (UID: "ced1f22f-b283-43b3-a85a-e591ff1dbb27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.343000 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.370434 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.372477 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.374082 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387472 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387560 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gpmm\" (UniqueName: \"kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387682 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387718 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387903 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387920 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkdht\" (UniqueName: \"kubernetes.io/projected/ced1f22f-b283-43b3-a85a-e591ff1dbb27-kube-api-access-tkdht\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387930 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ced1f22f-b283-43b3-a85a-e591ff1dbb27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.387996 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-api-0"] Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489342 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489395 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489451 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gpmm\" (UniqueName: \"kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489476 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489499 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489516 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ck22\" (UniqueName: \"kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489686 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.489769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.490358 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.492632 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.506462 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.509344 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gpmm\" (UniqueName: \"kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm\") pod \"nova-metadata-0\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.591610 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.591711 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.591740 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.591758 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ck22\" (UniqueName: \"kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.592059 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.594771 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.594866 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.616471 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ck22\" (UniqueName: \"kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22\") pod \"nova-api-0\" (UID: 
\"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " pod="openstack/nova-api-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.653800 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:04:22 crc kubenswrapper[4879]: I1125 16:04:22.697489 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.014074 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.143933 4879 generic.go:334] "Generic (PLEG): container finished" podID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" exitCode=0 Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.144019 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.144327 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.144318 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627","Type":"ContainerDied","Data":"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f"} Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.144528 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627","Type":"ContainerDied","Data":"b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815"} Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.144604 4879 scope.go:117] "RemoveContainer" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.170783 4879 scope.go:117] "RemoveContainer" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" Nov 25 16:04:23 crc kubenswrapper[4879]: E1125 16:04:23.171410 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f\": container with ID starting with b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f not found: ID does not exist" containerID="b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.171487 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f"} err="failed to get container status \"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f\": rpc error: code = NotFound desc = could not find container \"b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f\": container with ID starting with b649409c4b65177575237ea6b15047d89a4682fbb880a10bc731a1df55ffc71f not found: ID does not exist" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.176603 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.185619 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.205256 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slwjq\" (UniqueName: \"kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq\") pod \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.205345 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data\") pod \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.205399 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle\") pod \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\" (UID: \"cf5d8a4f-6f5f-4381-8282-abf6b2b2e627\") " Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.211863 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: E1125 16:04:23.212384 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerName="nova-cell0-conductor-conductor" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.212408 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerName="nova-cell0-conductor-conductor" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.212673 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" containerName="nova-cell0-conductor-conductor" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.213457 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.214170 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq" (OuterVolumeSpecName: "kube-api-access-slwjq") pod "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" (UID: "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627"). InnerVolumeSpecName "kube-api-access-slwjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.215184 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.224228 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.235463 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.243049 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" (UID: "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.255823 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data" (OuterVolumeSpecName: "config-data") pod "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" (UID: "cf5d8a4f-6f5f-4381-8282-abf6b2b2e627"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.268272 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: W1125 16:04:23.276765 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod557da3e1_38c8_4663_85cf_14b4d0ca28b2.slice/crio-4b850a28b66c828f7854b00af831e57e1f1b2731558aae7473e2dbc921687683 WatchSource:0}: Error finding container 4b850a28b66c828f7854b00af831e57e1f1b2731558aae7473e2dbc921687683: Status 404 returned error can't find the container with id 4b850a28b66c828f7854b00af831e57e1f1b2731558aae7473e2dbc921687683 Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307462 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqbw5\" (UniqueName: \"kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307626 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307706 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307930 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slwjq\" (UniqueName: \"kubernetes.io/projected/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-kube-api-access-slwjq\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307951 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.307963 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.409473 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " 
pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.409864 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.410023 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqbw5\" (UniqueName: \"kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.414894 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.419509 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.426915 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqbw5\" (UniqueName: \"kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5\") pod \"nova-cell1-conductor-0\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.508237 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.536509 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: E1125 16:04:23.540751 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf5d8a4f_6f5f_4381_8282_abf6b2b2e627.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf5d8a4f_6f5f_4381_8282_abf6b2b2e627.slice/crio-b43f000034d98630f977e7bc7c10588a050a9539262b518276d23cc11b7c6815\": RecentStats: unable to find data in memory cache]" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.563102 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.599201 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.600693 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.605726 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.614002 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.648185 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:04:23 crc kubenswrapper[4879]: E1125 16:04:23.648576 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.666350 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c2b162-8b9a-4c6f-a29c-03a03beeb22c" path="/var/lib/kubelet/pods/62c2b162-8b9a-4c6f-a29c-03a03beeb22c/volumes" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.667354 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c049fa46-6a96-405d-9cf7-a86978fdd705" path="/var/lib/kubelet/pods/c049fa46-6a96-405d-9cf7-a86978fdd705/volumes" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.667925 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ced1f22f-b283-43b3-a85a-e591ff1dbb27" path="/var/lib/kubelet/pods/ced1f22f-b283-43b3-a85a-e591ff1dbb27/volumes" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.668869 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf5d8a4f-6f5f-4381-8282-abf6b2b2e627" path="/var/lib/kubelet/pods/cf5d8a4f-6f5f-4381-8282-abf6b2b2e627/volumes" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.669487 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.715804 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.716180 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9fbj\" (UniqueName: \"kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.716338 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.818224 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.818392 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9fbj\" (UniqueName: \"kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.819165 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.823056 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.824473 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:23 crc kubenswrapper[4879]: I1125 16:04:23.835741 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9fbj\" (UniqueName: \"kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj\") pod \"nova-cell0-conductor-0\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.018238 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.025180 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.236473 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerStarted","Data":"120d120230066419362a4ff48d3790b4753203ed6d6637d59dc1efcc1d1a37c3"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.236847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerStarted","Data":"6aad49e0f6a4f078a1a8a1e1da15c03eba66261a5fbdcc7d890da1e9f55aa135"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.236860 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerStarted","Data":"fa5a8b719111bb32487f8fcb8468a2caba787332b1721cb3fe80ac270dc09dce"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.268655 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.268636617 podStartE2EDuration="2.268636617s" podCreationTimestamp="2025-11-25 16:04:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:24.267511107 +0000 UTC m=+5955.870924178" watchObservedRunningTime="2025-11-25 16:04:24.268636617 +0000 UTC m=+5955.872049688" Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.270924 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerStarted","Data":"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.270987 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerStarted","Data":"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.270997 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerStarted","Data":"4b850a28b66c828f7854b00af831e57e1f1b2731558aae7473e2dbc921687683"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.329079 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.329057607 podStartE2EDuration="2.329057607s" podCreationTimestamp="2025-11-25 16:04:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:24.310768611 +0000 UTC m=+5955.914181682" watchObservedRunningTime="2025-11-25 16:04:24.329057607 +0000 UTC m=+5955.932470668" Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.333268 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5db21db9-0d2e-42eb-8f20-ea1af2d0641a","Type":"ContainerStarted","Data":"bb910c3152a1978ccc2295b13b68b3757f2cc1802d50126cece4b8668e189c82"} Nov 25 16:04:24 crc kubenswrapper[4879]: I1125 16:04:24.373626 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:04:24 crc kubenswrapper[4879]: W1125 16:04:24.374914 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb719ae29_7488_49f4_8899_859e6271d428.slice/crio-87349c583392da7693072c62c9340787f3e92bf6be1b181a502869b5bd0d5f5e WatchSource:0}: Error finding container 87349c583392da7693072c62c9340787f3e92bf6be1b181a502869b5bd0d5f5e: Status 404 returned error can't find the container with id 87349c583392da7693072c62c9340787f3e92bf6be1b181a502869b5bd0d5f5e Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.346068 4879 generic.go:334] "Generic (PLEG): container finished" podID="4223a71e-c436-4808-b86a-971246d6ef92" containerID="5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" exitCode=0 Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.346238 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4223a71e-c436-4808-b86a-971246d6ef92","Type":"ContainerDied","Data":"5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18"} Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.348351 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b719ae29-7488-49f4-8899-859e6271d428","Type":"ContainerStarted","Data":"9e6034b84e23297b847e6429e0891118fefda2c5ab4f1ad763fc43e577af1bea"} Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.348375 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b719ae29-7488-49f4-8899-859e6271d428","Type":"ContainerStarted","Data":"87349c583392da7693072c62c9340787f3e92bf6be1b181a502869b5bd0d5f5e"} Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.348468 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.352361 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5db21db9-0d2e-42eb-8f20-ea1af2d0641a","Type":"ContainerStarted","Data":"5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2"} Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.353191 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.373269 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.373244762 podStartE2EDuration="2.373244762s" podCreationTimestamp="2025-11-25 16:04:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:25.363963051 +0000 UTC m=+5956.967376142" watchObservedRunningTime="2025-11-25 16:04:25.373244762 +0000 UTC m=+5956.976657853" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.396343 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.396321389 podStartE2EDuration="2.396321389s" podCreationTimestamp="2025-11-25 16:04:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:25.379560894 +0000 UTC m=+5956.982973965" watchObservedRunningTime="2025-11-25 16:04:25.396321389 +0000 UTC m=+5956.999734460" Nov 25 16:04:25 
crc kubenswrapper[4879]: I1125 16:04:25.683929 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.871311 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data\") pod \"4223a71e-c436-4808-b86a-971246d6ef92\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.871369 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle\") pod \"4223a71e-c436-4808-b86a-971246d6ef92\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.871427 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4lkv\" (UniqueName: \"kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv\") pod \"4223a71e-c436-4808-b86a-971246d6ef92\" (UID: \"4223a71e-c436-4808-b86a-971246d6ef92\") " Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.876600 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv" (OuterVolumeSpecName: "kube-api-access-q4lkv") pod "4223a71e-c436-4808-b86a-971246d6ef92" (UID: "4223a71e-c436-4808-b86a-971246d6ef92"). InnerVolumeSpecName "kube-api-access-q4lkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.901652 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4223a71e-c436-4808-b86a-971246d6ef92" (UID: "4223a71e-c436-4808-b86a-971246d6ef92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.911497 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data" (OuterVolumeSpecName: "config-data") pod "4223a71e-c436-4808-b86a-971246d6ef92" (UID: "4223a71e-c436-4808-b86a-971246d6ef92"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.973347 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.973384 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4223a71e-c436-4808-b86a-971246d6ef92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:25 crc kubenswrapper[4879]: I1125 16:04:25.973401 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4lkv\" (UniqueName: \"kubernetes.io/projected/4223a71e-c436-4808-b86a-971246d6ef92-kube-api-access-q4lkv\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.364086 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.373572 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4223a71e-c436-4808-b86a-971246d6ef92","Type":"ContainerDied","Data":"4ec2bfe1ffe957bc9dbd4ed3394a9d5e918afe3d5628c452049bc2c92598164f"} Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.373631 4879 scope.go:117] "RemoveContainer" containerID="5a8ac6448e1714f16698b2c3dfd0ce74c0c8dc89b3f82b7b918e5dbc64d40d18" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.408949 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.424457 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.451992 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:26 crc kubenswrapper[4879]: E1125 16:04:26.452984 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4223a71e-c436-4808-b86a-971246d6ef92" containerName="nova-scheduler-scheduler" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.453009 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4223a71e-c436-4808-b86a-971246d6ef92" containerName="nova-scheduler-scheduler" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.453446 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4223a71e-c436-4808-b86a-971246d6ef92" containerName="nova-scheduler-scheduler" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.454511 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.458446 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.483242 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.584282 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p629q\" (UniqueName: \"kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.584354 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.584393 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.685664 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p629q\" (UniqueName: \"kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q\") pod 
\"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.685762 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.686949 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.691447 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.696669 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.719693 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p629q\" (UniqueName: \"kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q\") pod \"nova-scheduler-0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " pod="openstack/nova-scheduler-0" Nov 25 16:04:26 crc kubenswrapper[4879]: I1125 16:04:26.781855 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:04:27 crc kubenswrapper[4879]: I1125 16:04:27.261500 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:04:27 crc kubenswrapper[4879]: W1125 16:04:27.280260 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92111edc_c57b_453c_bfbb_508c29a7c0f0.slice/crio-166b28ce10ca77985065d8dd6b0c2186643ba3c30f97cd3a02433ce0d9a7f23a WatchSource:0}: Error finding container 166b28ce10ca77985065d8dd6b0c2186643ba3c30f97cd3a02433ce0d9a7f23a: Status 404 returned error can't find the container with id 166b28ce10ca77985065d8dd6b0c2186643ba3c30f97cd3a02433ce0d9a7f23a Nov 25 16:04:27 crc kubenswrapper[4879]: I1125 16:04:27.379217 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92111edc-c57b-453c-bfbb-508c29a7c0f0","Type":"ContainerStarted","Data":"166b28ce10ca77985065d8dd6b0c2186643ba3c30f97cd3a02433ce0d9a7f23a"} Nov 25 16:04:27 crc kubenswrapper[4879]: I1125 16:04:27.661520 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4223a71e-c436-4808-b86a-971246d6ef92" path="/var/lib/kubelet/pods/4223a71e-c436-4808-b86a-971246d6ef92/volumes" Nov 25 16:04:27 crc kubenswrapper[4879]: I1125 16:04:27.662504 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:04:27 crc kubenswrapper[4879]: I1125 16:04:27.662536 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:04:28 crc kubenswrapper[4879]: I1125 16:04:28.392255 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92111edc-c57b-453c-bfbb-508c29a7c0f0","Type":"ContainerStarted","Data":"9a41940d1d7096a7ac78292ded9aeabee98b8be50ed168573db512a271259bd8"} Nov 25 16:04:28 crc kubenswrapper[4879]: I1125 16:04:28.415693 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.415671734 podStartE2EDuration="2.415671734s" podCreationTimestamp="2025-11-25 16:04:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:28.409565109 +0000 UTC m=+5960.012978180" watchObservedRunningTime="2025-11-25 16:04:28.415671734 +0000 UTC m=+5960.019084815" Nov 25 16:04:28 crc kubenswrapper[4879]: I1125 16:04:28.652459 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:28 crc kubenswrapper[4879]: I1125 16:04:28.664103 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:29 crc kubenswrapper[4879]: I1125 16:04:29.060228 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 25 16:04:29 crc kubenswrapper[4879]: I1125 16:04:29.412683 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 25 16:04:31 crc kubenswrapper[4879]: I1125 16:04:31.782414 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 16:04:32 crc kubenswrapper[4879]: I1125 16:04:32.655226 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:04:32 
crc kubenswrapper[4879]: I1125 16:04:32.655287 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:04:32 crc kubenswrapper[4879]: I1125 16:04:32.698912 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:04:32 crc kubenswrapper[4879]: I1125 16:04:32.698963 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:04:33 crc kubenswrapper[4879]: I1125 16:04:33.580594 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 25 16:04:33 crc kubenswrapper[4879]: I1125 16:04:33.738379 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.90:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:04:33 crc kubenswrapper[4879]: I1125 16:04:33.738790 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.90:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:04:33 crc kubenswrapper[4879]: I1125 16:04:33.822351 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.91:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:04:33 crc kubenswrapper[4879]: I1125 16:04:33.822506 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.91:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:04:34 crc kubenswrapper[4879]: I1125 16:04:34.644604 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:04:34 crc kubenswrapper[4879]: E1125 16:04:34.645148 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.780584 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.782495 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.785724 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.789771 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953236 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953333 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953428 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tdmc\" (UniqueName: \"kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953535 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:35 crc kubenswrapper[4879]: I1125 16:04:35.953664 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055354 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055385 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055417 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tdmc\" (UniqueName: \"kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055476 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055473 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.055517 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.061861 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.062610 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.062669 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.062712 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.077536 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tdmc\" (UniqueName: \"kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc\") pod \"cinder-scheduler-0\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 
crc kubenswrapper[4879]: I1125 16:04:36.115960 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.576586 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:36 crc kubenswrapper[4879]: W1125 16:04:36.577993 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbdef3752_880c_44b5_b1ed_8e0ca53b113d.slice/crio-fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b WatchSource:0}: Error finding container fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b: Status 404 returned error can't find the container with id fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.782626 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 16:04:36 crc kubenswrapper[4879]: I1125 16:04:36.814533 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.409948 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.411337 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api-log" containerID="cri-o://c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d" gracePeriod=30 Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.412650 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api" containerID="cri-o://b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95" gracePeriod=30 Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.497905 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerStarted","Data":"eddbbb6b3634367b4b3d07df5c64316f91335d028126ea0057936d07c228b88b"} Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.497964 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerStarted","Data":"fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b"} Nov 25 16:04:37 crc kubenswrapper[4879]: I1125 16:04:37.526804 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.038043 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.039883 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.042450 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.051053 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195017 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195186 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195219 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195358 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195436 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195499 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195529 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-run\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195562 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 
16:04:38.195595 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195621 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195658 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195706 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195738 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195764 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195799 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.195828 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bkw8\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-kube-api-access-2bkw8\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297383 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297436 
4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297465 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297491 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297509 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297525 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-run\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297545 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297572 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297594 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297597 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297588 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-sys\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " 
pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297562 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297631 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-run\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297680 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-dev\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297828 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297836 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.297884 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298001 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298063 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298088 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298173 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298220 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bkw8\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-kube-api-access-2bkw8\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298249 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298504 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.298784 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.301645 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3fbeb883-d1df-4c65-8125-cdeb73794af3-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.304740 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.305925 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.306305 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.306456 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " 
pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.307168 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fbeb883-d1df-4c65-8125-cdeb73794af3-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.319334 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bkw8\" (UniqueName: \"kubernetes.io/projected/3fbeb883-d1df-4c65-8125-cdeb73794af3-kube-api-access-2bkw8\") pod \"cinder-volume-volume1-0\" (UID: \"3fbeb883-d1df-4c65-8125-cdeb73794af3\") " pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.359508 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.513990 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerStarted","Data":"584abaf843c718518d542e052391d201884def52b90c22054ec4e7f226db44b3"} Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.516582 4879 generic.go:334] "Generic (PLEG): container finished" podID="0da1dc04-65d7-4c21-8846-887add0b4807" containerID="c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d" exitCode=143 Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.516911 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerDied","Data":"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d"} Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.532392 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.532376622 podStartE2EDuration="3.532376622s" podCreationTimestamp="2025-11-25 16:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:38.529372881 +0000 UTC m=+5970.132785952" watchObservedRunningTime="2025-11-25 16:04:38.532376622 +0000 UTC m=+5970.135789693" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.781014 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.791099 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.799587 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.809272 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915234 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-scripts\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915297 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-ceph\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915346 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915370 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915421 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915455 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915490 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-dev\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915551 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915597 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915621 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915667 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915705 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-sys\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915729 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svrjz\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-kube-api-access-svrjz\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915772 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-run\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915796 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.915818 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-lib-modules\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.983137 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 25 16:04:38 crc kubenswrapper[4879]: I1125 16:04:38.992713 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017235 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc 
kubenswrapper[4879]: I1125 16:04:39.017314 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017344 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017374 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-dev\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017432 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017470 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017495 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017537 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017571 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-sys\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017594 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svrjz\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-kube-api-access-svrjz\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017638 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-run\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 
25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017657 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017678 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-lib-modules\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017704 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-scripts\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017726 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-ceph\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017762 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.017875 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-nvme\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018092 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018149 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-sys\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018179 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018208 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-dev\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018252 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018757 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018869 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-lib-modules\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018899 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-run\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.018949 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.025412 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-ceph\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.025845 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.026049 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-scripts\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.026178 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.027158 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-config-data-custom\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.040556 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-svrjz\" (UniqueName: \"kubernetes.io/projected/94d9e6dc-e680-4e3c-a4d9-29e638f2e47e-kube-api-access-svrjz\") pod \"cinder-backup-0\" (UID: \"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e\") " pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.115043 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.530059 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3fbeb883-d1df-4c65-8125-cdeb73794af3","Type":"ContainerStarted","Data":"c4d761895a4ec4437c7936e26e7ed17669ba5539ae32f99d0307b669c8264d09"} Nov 25 16:04:39 crc kubenswrapper[4879]: I1125 16:04:39.659332 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.541258 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3fbeb883-d1df-4c65-8125-cdeb73794af3","Type":"ContainerStarted","Data":"76e2e338f52ece684a17f903455bfb171bec01cc9f3515724f4331652b0695d0"} Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.542001 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"3fbeb883-d1df-4c65-8125-cdeb73794af3","Type":"ContainerStarted","Data":"c5d36f81d30440710117c192b9242ff56dd24a4514c8634bea53210feec023cc"} Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.542737 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e","Type":"ContainerStarted","Data":"93b1c3f4a2376737488d8f5f23d5b87026fa3b87c001761e29f3c6250f969c34"} Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.576321 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=1.670141468 podStartE2EDuration="2.576300319s" podCreationTimestamp="2025-11-25 16:04:38 +0000 UTC" firstStartedPulling="2025-11-25 16:04:38.992465367 +0000 UTC m=+5970.595878438" lastFinishedPulling="2025-11-25 16:04:39.898624218 +0000 UTC m=+5971.502037289" observedRunningTime="2025-11-25 16:04:40.569895734 +0000 UTC m=+5972.173308825" watchObservedRunningTime="2025-11-25 16:04:40.576300319 +0000 UTC m=+5972.179713390" Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.656255 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.88:8776/healthcheck\": dial tcp 10.217.1.88:8776: connect: connection refused" Nov 25 16:04:40 crc kubenswrapper[4879]: I1125 16:04:40.969819 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.065966 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066023 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066045 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066070 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066189 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvzws\" (UniqueName: \"kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066257 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.066305 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data\") pod \"0da1dc04-65d7-4c21-8846-887add0b4807\" (UID: \"0da1dc04-65d7-4c21-8846-887add0b4807\") " Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.071348 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.071758 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs" (OuterVolumeSpecName: "logs") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.078611 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws" (OuterVolumeSpecName: "kube-api-access-hvzws") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "kube-api-access-hvzws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.080405 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts" (OuterVolumeSpecName: "scripts") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.081901 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.099533 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.119360 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.150638 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data" (OuterVolumeSpecName: "config-data") pod "0da1dc04-65d7-4c21-8846-887add0b4807" (UID: "0da1dc04-65d7-4c21-8846-887add0b4807"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167867 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167904 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167917 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0da1dc04-65d7-4c21-8846-887add0b4807-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167926 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167935 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0da1dc04-65d7-4c21-8846-887add0b4807-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167949 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvzws\" (UniqueName: \"kubernetes.io/projected/0da1dc04-65d7-4c21-8846-887add0b4807-kube-api-access-hvzws\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.167959 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0da1dc04-65d7-4c21-8846-887add0b4807-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.560496 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e","Type":"ContainerStarted","Data":"188a34fa7dfa1d02997ecc3197501ce1ab2dc4fd24fc053e9f20fa279232892e"} Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.560850 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"94d9e6dc-e680-4e3c-a4d9-29e638f2e47e","Type":"ContainerStarted","Data":"15e369113f94591cb911ef082dec8938207e9781dc5afbbbcc4163dd237dfecd"} Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.567523 4879 generic.go:334] "Generic (PLEG): container finished" podID="0da1dc04-65d7-4c21-8846-887add0b4807" containerID="b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95" exitCode=0 Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.567596 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerDied","Data":"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95"} Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.567627 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.567650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0da1dc04-65d7-4c21-8846-887add0b4807","Type":"ContainerDied","Data":"a4f5690c3ce4f30de559ede8a824871d09f096162f874e75df6b2517eeee1e97"} Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.567665 4879 scope.go:117] "RemoveContainer" containerID="b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.603727 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.899649254 podStartE2EDuration="3.603707149s" podCreationTimestamp="2025-11-25 16:04:38 +0000 UTC" firstStartedPulling="2025-11-25 16:04:39.649377475 +0000 UTC m=+5971.252790546" lastFinishedPulling="2025-11-25 16:04:40.35343537 +0000 UTC m=+5971.956848441" observedRunningTime="2025-11-25 16:04:41.588568869 +0000 UTC m=+5973.191981950" watchObservedRunningTime="2025-11-25 16:04:41.603707149 +0000 UTC m=+5973.207120220" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.611870 4879 scope.go:117] "RemoveContainer" containerID="c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.622384 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.631917 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.636575 4879 scope.go:117] "RemoveContainer" containerID="b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95" Nov 25 16:04:41 crc kubenswrapper[4879]: E1125 16:04:41.637092 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95\": container with ID starting with b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95 not found: ID does not exist" containerID="b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.637149 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95"} err="failed to get container status \"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95\": rpc error: code = NotFound desc = could not find container \"b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95\": container with ID starting with b3396a74b320ac257b60268f90e254a9cff77f60a6fa473f579a43d388dc5c95 not found: ID does not exist" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.637182 4879 scope.go:117] "RemoveContainer" containerID="c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d" Nov 25 16:04:41 crc kubenswrapper[4879]: E1125 16:04:41.637480 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d\": container with ID starting with c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d not found: ID does not exist" containerID="c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 
16:04:41.637500 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d"} err="failed to get container status \"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d\": rpc error: code = NotFound desc = could not find container \"c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d\": container with ID starting with c516856f52a9603c7b9d890c27c579578924b0ebbcd1c31e92707fba9da0100d not found: ID does not exist" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.641874 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:41 crc kubenswrapper[4879]: E1125 16:04:41.642338 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.642358 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api" Nov 25 16:04:41 crc kubenswrapper[4879]: E1125 16:04:41.642378 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api-log" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.642385 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api-log" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.642579 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api-log" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.642597 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" containerName="cinder-api" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.643576 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.647785 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.679244 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da1dc04-65d7-4c21-8846-887add0b4807" path="/var/lib/kubelet/pods/0da1dc04-65d7-4c21-8846-887add0b4807/volumes" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.680221 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780008 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-scripts\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780409 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkvpc\" (UniqueName: \"kubernetes.io/projected/e3a59410-e11a-420f-a12a-ffc6e4e70da5-kube-api-access-jkvpc\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780471 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780493 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e3a59410-e11a-420f-a12a-ffc6e4e70da5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780521 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780552 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3a59410-e11a-420f-a12a-ffc6e4e70da5-logs\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.780572 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: 
\"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882369 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e3a59410-e11a-420f-a12a-ffc6e4e70da5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882431 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3a59410-e11a-420f-a12a-ffc6e4e70da5-logs\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882492 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882533 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-scripts\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882592 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkvpc\" (UniqueName: \"kubernetes.io/projected/e3a59410-e11a-420f-a12a-ffc6e4e70da5-kube-api-access-jkvpc\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.882958 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e3a59410-e11a-420f-a12a-ffc6e4e70da5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.883714 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3a59410-e11a-420f-a12a-ffc6e4e70da5-logs\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.888331 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-scripts\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.888751 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 
crc kubenswrapper[4879]: I1125 16:04:41.893650 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.894065 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a59410-e11a-420f-a12a-ffc6e4e70da5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.906736 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkvpc\" (UniqueName: \"kubernetes.io/projected/e3a59410-e11a-420f-a12a-ffc6e4e70da5-kube-api-access-jkvpc\") pod \"cinder-api-0\" (UID: \"e3a59410-e11a-420f-a12a-ffc6e4e70da5\") " pod="openstack/cinder-api-0" Nov 25 16:04:41 crc kubenswrapper[4879]: I1125 16:04:41.961591 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.427785 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.579006 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e3a59410-e11a-420f-a12a-ffc6e4e70da5","Type":"ContainerStarted","Data":"681897a7709b6507b42e5f834af6ba244bd0bd9d8c366178da7ebba079d6d2cd"} Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.656937 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.659316 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.663027 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.718279 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.718349 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.719454 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.719668 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.722566 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:04:42 crc kubenswrapper[4879]: I1125 16:04:42.725951 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:04:43 crc kubenswrapper[4879]: I1125 16:04:43.363600 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:43 crc kubenswrapper[4879]: I1125 16:04:43.593174 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"e3a59410-e11a-420f-a12a-ffc6e4e70da5","Type":"ContainerStarted","Data":"2b551810a33633877404364197844eeb3544e5fc5c1d0c59836b7a847729422d"} Nov 25 16:04:43 crc kubenswrapper[4879]: I1125 16:04:43.597147 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:04:44 crc kubenswrapper[4879]: I1125 16:04:44.116141 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Nov 25 16:04:44 crc kubenswrapper[4879]: I1125 16:04:44.608113 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e3a59410-e11a-420f-a12a-ffc6e4e70da5","Type":"ContainerStarted","Data":"68401824ea0e11b1e1602d66d25378549fff6214c157a2155682095c715e4e1c"} Nov 25 16:04:44 crc kubenswrapper[4879]: I1125 16:04:44.632388 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.632367797 podStartE2EDuration="3.632367797s" podCreationTimestamp="2025-11-25 16:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:44.627080985 +0000 UTC m=+5976.230494056" watchObservedRunningTime="2025-11-25 16:04:44.632367797 +0000 UTC m=+5976.235780868" Nov 25 16:04:45 crc kubenswrapper[4879]: I1125 16:04:45.617561 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 25 16:04:45 crc kubenswrapper[4879]: I1125 16:04:45.645029 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:04:45 crc kubenswrapper[4879]: E1125 16:04:45.645603 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:04:46 crc kubenswrapper[4879]: I1125 16:04:46.300643 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 25 16:04:46 crc kubenswrapper[4879]: I1125 16:04:46.351269 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:46 crc kubenswrapper[4879]: I1125 16:04:46.641105 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="cinder-scheduler" containerID="cri-o://eddbbb6b3634367b4b3d07df5c64316f91335d028126ea0057936d07c228b88b" gracePeriod=30 Nov 25 16:04:46 crc kubenswrapper[4879]: I1125 16:04:46.641226 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="probe" containerID="cri-o://584abaf843c718518d542e052391d201884def52b90c22054ec4e7f226db44b3" gracePeriod=30 Nov 25 16:04:47 crc kubenswrapper[4879]: I1125 16:04:47.662328 4879 generic.go:334] "Generic (PLEG): container finished" podID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerID="584abaf843c718518d542e052391d201884def52b90c22054ec4e7f226db44b3" exitCode=0 Nov 25 16:04:47 crc kubenswrapper[4879]: I1125 16:04:47.662397 4879 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerDied","Data":"584abaf843c718518d542e052391d201884def52b90c22054ec4e7f226db44b3"} Nov 25 16:04:48 crc kubenswrapper[4879]: I1125 16:04:48.567508 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.334327 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.689598 4879 generic.go:334] "Generic (PLEG): container finished" podID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerID="eddbbb6b3634367b4b3d07df5c64316f91335d028126ea0057936d07c228b88b" exitCode=0 Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.689693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerDied","Data":"eddbbb6b3634367b4b3d07df5c64316f91335d028126ea0057936d07c228b88b"} Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.689993 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bdef3752-880c-44b5-b1ed-8e0ca53b113d","Type":"ContainerDied","Data":"fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b"} Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.690015 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fddf8839bc84f9e606668c98de9db60963d2e9bf3469990d6ef9955ad72ebb4b" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.731179 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.840645 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.840709 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tdmc\" (UniqueName: \"kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.840892 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.840960 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.840990 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: 
\"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.841021 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle\") pod \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\" (UID: \"bdef3752-880c-44b5-b1ed-8e0ca53b113d\") " Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.841294 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.842050 4879 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bdef3752-880c-44b5-b1ed-8e0ca53b113d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.846650 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc" (OuterVolumeSpecName: "kube-api-access-9tdmc") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "kube-api-access-9tdmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.853052 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.857961 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts" (OuterVolumeSpecName: "scripts") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.894628 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.934473 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data" (OuterVolumeSpecName: "config-data") pod "bdef3752-880c-44b5-b1ed-8e0ca53b113d" (UID: "bdef3752-880c-44b5-b1ed-8e0ca53b113d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.943952 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.943992 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tdmc\" (UniqueName: \"kubernetes.io/projected/bdef3752-880c-44b5-b1ed-8e0ca53b113d-kube-api-access-9tdmc\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.944003 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.944020 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:49 crc kubenswrapper[4879]: I1125 16:04:49.944029 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bdef3752-880c-44b5-b1ed-8e0ca53b113d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.698209 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.729099 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.738804 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.789356 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:50 crc kubenswrapper[4879]: E1125 16:04:50.790645 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="cinder-scheduler" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.790684 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="cinder-scheduler" Nov 25 16:04:50 crc kubenswrapper[4879]: E1125 16:04:50.790723 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="probe" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.790732 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="probe" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.791618 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="cinder-scheduler" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.791718 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" containerName="probe" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.793842 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.797109 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.801470 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.886299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gjq9\" (UniqueName: \"kubernetes.io/projected/58462743-d61e-44b0-bdfb-675330d8b5ad-kube-api-access-5gjq9\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.887254 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58462743-d61e-44b0-bdfb-675330d8b5ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.887314 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.887401 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.887451 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.887645 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.989065 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.989450 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gjq9\" (UniqueName: \"kubernetes.io/projected/58462743-d61e-44b0-bdfb-675330d8b5ad-kube-api-access-5gjq9\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.989880 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58462743-d61e-44b0-bdfb-675330d8b5ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.990033 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.990162 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.990699 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.989988 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58462743-d61e-44b0-bdfb-675330d8b5ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.994044 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.994485 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.994871 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:50 crc kubenswrapper[4879]: I1125 16:04:50.995246 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58462743-d61e-44b0-bdfb-675330d8b5ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 16:04:51 crc kubenswrapper[4879]: I1125 16:04:51.012435 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gjq9\" (UniqueName: \"kubernetes.io/projected/58462743-d61e-44b0-bdfb-675330d8b5ad-kube-api-access-5gjq9\") pod \"cinder-scheduler-0\" (UID: \"58462743-d61e-44b0-bdfb-675330d8b5ad\") " pod="openstack/cinder-scheduler-0" Nov 25 
16:04:51 crc kubenswrapper[4879]: I1125 16:04:51.119808 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 25 16:04:51 crc kubenswrapper[4879]: I1125 16:04:51.610657 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 25 16:04:51 crc kubenswrapper[4879]: I1125 16:04:51.658257 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdef3752-880c-44b5-b1ed-8e0ca53b113d" path="/var/lib/kubelet/pods/bdef3752-880c-44b5-b1ed-8e0ca53b113d/volumes" Nov 25 16:04:51 crc kubenswrapper[4879]: I1125 16:04:51.718306 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"58462743-d61e-44b0-bdfb-675330d8b5ad","Type":"ContainerStarted","Data":"d253b2410686886297d6b47046729ae916f955de2a93a61faab52085fa5b801d"} Nov 25 16:04:52 crc kubenswrapper[4879]: I1125 16:04:52.728303 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"58462743-d61e-44b0-bdfb-675330d8b5ad","Type":"ContainerStarted","Data":"70fc504db318ef469824244111d7b1d604ab5896b1117e139ab9f43056e42e0b"} Nov 25 16:04:53 crc kubenswrapper[4879]: I1125 16:04:53.755547 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"58462743-d61e-44b0-bdfb-675330d8b5ad","Type":"ContainerStarted","Data":"e3665746b1a7a7679189714c0888ee1cb45ae6705768968174b5c31dff15944a"} Nov 25 16:04:53 crc kubenswrapper[4879]: I1125 16:04:53.799216 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.799195909 podStartE2EDuration="3.799195909s" podCreationTimestamp="2025-11-25 16:04:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:04:53.790585035 +0000 UTC m=+5985.393998106" watchObservedRunningTime="2025-11-25 16:04:53.799195909 +0000 UTC m=+5985.402608970" Nov 25 16:04:53 crc kubenswrapper[4879]: I1125 16:04:53.813463 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 25 16:04:56 crc kubenswrapper[4879]: I1125 16:04:56.120520 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 25 16:05:00 crc kubenswrapper[4879]: I1125 16:05:00.645408 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:05:00 crc kubenswrapper[4879]: E1125 16:05:00.646195 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:05:01 crc kubenswrapper[4879]: I1125 16:05:01.311618 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 25 16:05:11 crc kubenswrapper[4879]: I1125 16:05:11.644525 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:05:11 crc kubenswrapper[4879]: E1125 16:05:11.645312 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:05:22 crc kubenswrapper[4879]: I1125 16:05:22.645432 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:05:22 crc kubenswrapper[4879]: E1125 16:05:22.646252 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:05:36 crc kubenswrapper[4879]: I1125 16:05:36.645017 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:05:36 crc kubenswrapper[4879]: E1125 16:05:36.645765 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:05:50 crc kubenswrapper[4879]: I1125 16:05:50.645221 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:05:50 crc kubenswrapper[4879]: E1125 16:05:50.645961 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:06:04 crc kubenswrapper[4879]: I1125 16:06:04.645696 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:06:04 crc kubenswrapper[4879]: E1125 16:06:04.647406 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.021197 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"] Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.023818 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.031917 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"] Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.090824 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.090925 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5kqr\" (UniqueName: \"kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.091305 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.195115 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5kqr\" (UniqueName: \"kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.195271 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.195734 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.196031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.196066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.223064 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j5kqr\" (UniqueName: \"kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr\") pod \"redhat-operators-l6gmq\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") " pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.363377 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6gmq" Nov 25 16:06:08 crc kubenswrapper[4879]: I1125 16:06:08.810482 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"] Nov 25 16:06:09 crc kubenswrapper[4879]: I1125 16:06:09.448011 4879 generic.go:334] "Generic (PLEG): container finished" podID="7b7961de-27ab-4009-8414-ebc972097396" containerID="1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882" exitCode=0 Nov 25 16:06:09 crc kubenswrapper[4879]: I1125 16:06:09.448196 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerDied","Data":"1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882"} Nov 25 16:06:09 crc kubenswrapper[4879]: I1125 16:06:09.448372 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerStarted","Data":"e6b2177123f77462e7ca47917badac391371dedd34eb4d2ff243ec5791ffe88c"} Nov 25 16:06:10 crc kubenswrapper[4879]: I1125 16:06:10.461967 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerStarted","Data":"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"} Nov 25 16:06:11 crc kubenswrapper[4879]: I1125 16:06:11.476588 4879 generic.go:334] "Generic (PLEG): container finished" podID="7b7961de-27ab-4009-8414-ebc972097396" containerID="fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0" exitCode=0 Nov 25 16:06:11 crc kubenswrapper[4879]: I1125 16:06:11.476662 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerDied","Data":"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"} Nov 25 16:06:12 crc kubenswrapper[4879]: I1125 16:06:12.492705 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerStarted","Data":"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"} Nov 25 16:06:12 crc kubenswrapper[4879]: I1125 16:06:12.522439 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l6gmq" podStartSLOduration=3.084219104 podStartE2EDuration="5.522420709s" podCreationTimestamp="2025-11-25 16:06:07 +0000 UTC" firstStartedPulling="2025-11-25 16:06:09.451851183 +0000 UTC m=+6061.055264264" lastFinishedPulling="2025-11-25 16:06:11.890052798 +0000 UTC m=+6063.493465869" observedRunningTime="2025-11-25 16:06:12.515216242 +0000 UTC m=+6064.118629313" watchObservedRunningTime="2025-11-25 16:06:12.522420709 +0000 UTC m=+6064.125833780" Nov 25 16:06:18 crc kubenswrapper[4879]: I1125 16:06:18.363522 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l6gmq" 
Nov 25 16:06:18 crc kubenswrapper[4879]: I1125 16:06:18.363908 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l6gmq"
Nov 25 16:06:18 crc kubenswrapper[4879]: I1125 16:06:18.419365 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l6gmq"
Nov 25 16:06:18 crc kubenswrapper[4879]: I1125 16:06:18.601228 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l6gmq"
Nov 25 16:06:18 crc kubenswrapper[4879]: I1125 16:06:18.644961 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f"
Nov 25 16:06:19 crc kubenswrapper[4879]: I1125 16:06:19.007529 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"]
Nov 25 16:06:19 crc kubenswrapper[4879]: I1125 16:06:19.573385 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8"}
Nov 25 16:06:20 crc kubenswrapper[4879]: I1125 16:06:20.582580 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l6gmq" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="registry-server" containerID="cri-o://24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624" gracePeriod=2
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.540382 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6gmq"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.567349 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5kqr\" (UniqueName: \"kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr\") pod \"7b7961de-27ab-4009-8414-ebc972097396\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") "
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.567523 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content\") pod \"7b7961de-27ab-4009-8414-ebc972097396\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") "
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.567634 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities\") pod \"7b7961de-27ab-4009-8414-ebc972097396\" (UID: \"7b7961de-27ab-4009-8414-ebc972097396\") "
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.568820 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities" (OuterVolumeSpecName: "utilities") pod "7b7961de-27ab-4009-8414-ebc972097396" (UID: "7b7961de-27ab-4009-8414-ebc972097396"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.575200 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr" (OuterVolumeSpecName: "kube-api-access-j5kqr") pod "7b7961de-27ab-4009-8414-ebc972097396" (UID: "7b7961de-27ab-4009-8414-ebc972097396"). InnerVolumeSpecName "kube-api-access-j5kqr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.596521 4879 generic.go:334] "Generic (PLEG): container finished" podID="7b7961de-27ab-4009-8414-ebc972097396" containerID="24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624" exitCode=0
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.596565 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerDied","Data":"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"}
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.596597 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6gmq" event={"ID":"7b7961de-27ab-4009-8414-ebc972097396","Type":"ContainerDied","Data":"e6b2177123f77462e7ca47917badac391371dedd34eb4d2ff243ec5791ffe88c"}
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.596617 4879 scope.go:117] "RemoveContainer" containerID="24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.596565 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6gmq"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.625999 4879 scope.go:117] "RemoveContainer" containerID="fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.646489 4879 scope.go:117] "RemoveContainer" containerID="1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.656062 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b7961de-27ab-4009-8414-ebc972097396" (UID: "7b7961de-27ab-4009-8414-ebc972097396"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.670503 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.670537 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b7961de-27ab-4009-8414-ebc972097396-utilities\") on node \"crc\" DevicePath \"\""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.670548 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5kqr\" (UniqueName: \"kubernetes.io/projected/7b7961de-27ab-4009-8414-ebc972097396-kube-api-access-j5kqr\") on node \"crc\" DevicePath \"\""
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.689823 4879 scope.go:117] "RemoveContainer" containerID="24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"
Nov 25 16:06:21 crc kubenswrapper[4879]: E1125 16:06:21.690576 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624\": container with ID starting with 24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624 not found: ID does not exist" containerID="24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.690609 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624"} err="failed to get container status \"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624\": rpc error: code = NotFound desc = could not find container \"24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624\": container with ID starting with 24d1ea46c269f5d9565222ac8bac1fa5d7e711928a83a8aada2ef8d17c0b0624 not found: ID does not exist"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.690629 4879 scope.go:117] "RemoveContainer" containerID="fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"
Nov 25 16:06:21 crc kubenswrapper[4879]: E1125 16:06:21.690938 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0\": container with ID starting with fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0 not found: ID does not exist" containerID="fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.691008 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0"} err="failed to get container status \"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0\": rpc error: code = NotFound desc = could not find container \"fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0\": container with ID starting with fa3ceb69459c25ca51e8be224a2f23208f45e5b16c998d99d398006e3db85de0 not found: ID does not exist"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.691037 4879 scope.go:117] "RemoveContainer" containerID="1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882"
Nov 25 16:06:21 crc kubenswrapper[4879]: E1125 16:06:21.691348 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882\": container with ID starting with 1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882 not found: ID does not exist" containerID="1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.691376 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882"} err="failed to get container status \"1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882\": rpc error: code = NotFound desc = could not find container \"1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882\": container with ID starting with 1c31c3d419091f7ff1a558495da26e99ee0dca9d5321551c74862942cb6f9882 not found: ID does not exist"
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.930116 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"]
Nov 25 16:06:21 crc kubenswrapper[4879]: I1125 16:06:21.938460 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l6gmq"]
Nov 25 16:06:23 crc kubenswrapper[4879]: I1125 16:06:23.655062 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b7961de-27ab-4009-8414-ebc972097396" path="/var/lib/kubelet/pods/7b7961de-27ab-4009-8414-ebc972097396/volumes"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.878541 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-r78t7"]
Nov 25 16:06:40 crc kubenswrapper[4879]: E1125 16:06:40.879388 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="registry-server"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.879402 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="registry-server"
Nov 25 16:06:40 crc kubenswrapper[4879]: E1125 16:06:40.879429 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="extract-utilities"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.879435 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="extract-utilities"
Nov 25 16:06:40 crc kubenswrapper[4879]: E1125 16:06:40.879445 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="extract-content"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.879452 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="extract-content"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.879656 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7961de-27ab-4009-8414-ebc972097396" containerName="registry-server"
Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.880315 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-r78t7" Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.882926 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tbmns" Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.885337 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.891525 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-r78t7"] Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.910240 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-nvsvv"] Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.912337 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:40 crc kubenswrapper[4879]: I1125 16:06:40.937037 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-nvsvv"] Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034379 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-scripts\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034412 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-run\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034428 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-lib\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034450 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-etc-ovs\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034569 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk7h8\" (UniqueName: \"kubernetes.io/projected/33ca0631-dd7b-4592-a04c-ea75322c7323-kube-api-access-lk7h8\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034592 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-log\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034613 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33ca0631-dd7b-4592-a04c-ea75322c7323-scripts\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034667 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-log-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034713 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034730 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.034768 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5pd6\" (UniqueName: \"kubernetes.io/projected/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-kube-api-access-z5pd6\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136471 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5pd6\" (UniqueName: \"kubernetes.io/projected/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-kube-api-access-z5pd6\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136533 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-scripts\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136551 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-run\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136566 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-lib\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136585 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-etc-ovs\") pod 
\"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136669 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk7h8\" (UniqueName: \"kubernetes.io/projected/33ca0631-dd7b-4592-a04c-ea75322c7323-kube-api-access-lk7h8\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136689 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-log\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136716 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33ca0631-dd7b-4592-a04c-ea75322c7323-scripts\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136768 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-log-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136800 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136823 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136952 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136951 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-run\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.136981 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-log\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.137048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" 
(UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-etc-ovs\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.137054 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-log-ovn\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.137091 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-var-lib\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.137148 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/33ca0631-dd7b-4592-a04c-ea75322c7323-var-run\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.138883 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33ca0631-dd7b-4592-a04c-ea75322c7323-scripts\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.139084 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-scripts\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.155483 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk7h8\" (UniqueName: \"kubernetes.io/projected/33ca0631-dd7b-4592-a04c-ea75322c7323-kube-api-access-lk7h8\") pod \"ovn-controller-r78t7\" (UID: \"33ca0631-dd7b-4592-a04c-ea75322c7323\") " pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.155500 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5pd6\" (UniqueName: \"kubernetes.io/projected/c7787cd4-dbb1-4a83-b79b-017ba868ad0f-kube-api-access-z5pd6\") pod \"ovn-controller-ovs-nvsvv\" (UID: \"c7787cd4-dbb1-4a83-b79b-017ba868ad0f\") " pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.201577 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-r78t7" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.235521 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.684910 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-r78t7"] Nov 25 16:06:41 crc kubenswrapper[4879]: I1125 16:06:41.782449 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-r78t7" event={"ID":"33ca0631-dd7b-4592-a04c-ea75322c7323","Type":"ContainerStarted","Data":"d1527315b8b3311db25ce9a21eadcbcdb773262e269ce3391ec5fee4d0fac968"} Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.061869 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-nvsvv"] Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.342957 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-zxj5g"] Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.344530 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.349533 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.355637 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zxj5g"] Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.366495 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovs-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.366579 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovn-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.366624 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdcb65bc-485f-4f17-9167-a6407d985f44-config\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.366840 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqgfd\" (UniqueName: \"kubernetes.io/projected/fdcb65bc-485f-4f17-9167-a6407d985f44-kube-api-access-tqgfd\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.468640 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqgfd\" (UniqueName: \"kubernetes.io/projected/fdcb65bc-485f-4f17-9167-a6407d985f44-kube-api-access-tqgfd\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.468746 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" 
(UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovs-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.468802 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovn-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.468833 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdcb65bc-485f-4f17-9167-a6407d985f44-config\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.469054 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovn-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.469050 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/fdcb65bc-485f-4f17-9167-a6407d985f44-ovs-rundir\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.469656 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdcb65bc-485f-4f17-9167-a6407d985f44-config\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.495587 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqgfd\" (UniqueName: \"kubernetes.io/projected/fdcb65bc-485f-4f17-9167-a6407d985f44-kube-api-access-tqgfd\") pod \"ovn-controller-metrics-zxj5g\" (UID: \"fdcb65bc-485f-4f17-9167-a6407d985f44\") " pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.671166 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-zxj5g" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.822814 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nvsvv" event={"ID":"c7787cd4-dbb1-4a83-b79b-017ba868ad0f","Type":"ContainerStarted","Data":"04ddfd73d550d779b07fd0233f14c450cfa2546585b68521d9bafc22641fb940"} Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.823141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nvsvv" event={"ID":"c7787cd4-dbb1-4a83-b79b-017ba868ad0f","Type":"ContainerStarted","Data":"45647a463296661aa22e8c0f5865b1918331f7166503408e95bc4d2637bc61b2"} Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.843762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-r78t7" event={"ID":"33ca0631-dd7b-4592-a04c-ea75322c7323","Type":"ContainerStarted","Data":"c27b84c58d26450b0a1a982c52435f520af1920b56f1222bd8af5376c60f7af7"} Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.843945 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-r78t7" Nov 25 16:06:42 crc kubenswrapper[4879]: I1125 16:06:42.895621 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-r78t7" podStartSLOduration=2.895601013 podStartE2EDuration="2.895601013s" podCreationTimestamp="2025-11-25 16:06:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:06:42.886607729 +0000 UTC m=+6094.490020800" watchObservedRunningTime="2025-11-25 16:06:42.895601013 +0000 UTC m=+6094.499014084" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.128785 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-hjvjm"] Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.130150 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.137956 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-hjvjm"] Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.195838 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.195903 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9msl7\" (UniqueName: \"kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.200133 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zxj5g"] Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.296974 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.297256 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9msl7\" (UniqueName: \"kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.297825 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.318260 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9msl7\" (UniqueName: \"kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7\") pod \"octavia-db-create-hjvjm\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.444683 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.859870 4879 generic.go:334] "Generic (PLEG): container finished" podID="c7787cd4-dbb1-4a83-b79b-017ba868ad0f" containerID="04ddfd73d550d779b07fd0233f14c450cfa2546585b68521d9bafc22641fb940" exitCode=0 Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.859929 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nvsvv" event={"ID":"c7787cd4-dbb1-4a83-b79b-017ba868ad0f","Type":"ContainerDied","Data":"04ddfd73d550d779b07fd0233f14c450cfa2546585b68521d9bafc22641fb940"} Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.862259 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zxj5g" event={"ID":"fdcb65bc-485f-4f17-9167-a6407d985f44","Type":"ContainerStarted","Data":"ce620c27cb386e50c8315db18207f68077ec488fce7496d62e39e7351fb12d93"} Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.862313 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zxj5g" event={"ID":"fdcb65bc-485f-4f17-9167-a6407d985f44","Type":"ContainerStarted","Data":"d67c9a53559cbef6eb08c5b944a5ea1f6aa2dda64e022a4f97b656d7e6bd49aa"} Nov 25 16:06:43 crc kubenswrapper[4879]: I1125 16:06:43.910238 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-hjvjm"] Nov 25 16:06:43 crc kubenswrapper[4879]: W1125 16:06:43.917589 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode841bc3b_e1b7_4c00_b207_f4f66c225be9.slice/crio-e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e WatchSource:0}: Error finding container e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e: Status 404 returned error can't find the container with id e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.407183 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-3f0a-account-create-gj9n8"] Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.413082 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.420503 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.423421 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3f0a-account-create-gj9n8"] Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.424391 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.424630 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq2fn\" (UniqueName: \"kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.526731 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.526813 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq2fn\" (UniqueName: \"kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.527812 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.556468 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq2fn\" (UniqueName: \"kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn\") pod \"octavia-3f0a-account-create-gj9n8\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.739804 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.878768 4879 generic.go:334] "Generic (PLEG): container finished" podID="e841bc3b-e1b7-4c00-b207-f4f66c225be9" containerID="6e8684bb91fceb1ed97148d20455dedbb54f41ef47574c3f53c647c80a5b4256" exitCode=0 Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.879113 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-hjvjm" event={"ID":"e841bc3b-e1b7-4c00-b207-f4f66c225be9","Type":"ContainerDied","Data":"6e8684bb91fceb1ed97148d20455dedbb54f41ef47574c3f53c647c80a5b4256"} Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.879158 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-hjvjm" event={"ID":"e841bc3b-e1b7-4c00-b207-f4f66c225be9","Type":"ContainerStarted","Data":"e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e"} Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.882552 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nvsvv" event={"ID":"c7787cd4-dbb1-4a83-b79b-017ba868ad0f","Type":"ContainerStarted","Data":"cca584104577b58e4722c78df0f1946ae1adc3c761b8bd9393abe9046c950a4d"} Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.882584 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-nvsvv" event={"ID":"c7787cd4-dbb1-4a83-b79b-017ba868ad0f","Type":"ContainerStarted","Data":"5dd38d2c615ca20c6843b545d70ccd205c6f67fbf1b8e671e553b327645d34ad"} Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.882988 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.883047 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.926103 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-zxj5g" podStartSLOduration=2.926084775 podStartE2EDuration="2.926084775s" podCreationTimestamp="2025-11-25 16:06:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:06:44.908649391 +0000 UTC m=+6096.512062462" watchObservedRunningTime="2025-11-25 16:06:44.926084775 +0000 UTC m=+6096.529497856" Nov 25 16:06:44 crc kubenswrapper[4879]: I1125 16:06:44.952139 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-nvsvv" podStartSLOduration=4.952109681 podStartE2EDuration="4.952109681s" podCreationTimestamp="2025-11-25 16:06:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:06:44.942029647 +0000 UTC m=+6096.545442718" watchObservedRunningTime="2025-11-25 16:06:44.952109681 +0000 UTC m=+6096.555522752" Nov 25 16:06:45 crc kubenswrapper[4879]: I1125 16:06:45.203112 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3f0a-account-create-gj9n8"] Nov 25 16:06:45 crc kubenswrapper[4879]: I1125 16:06:45.897515 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3f0a-account-create-gj9n8" event={"ID":"420b294c-2281-48eb-9339-d5fbe28c57b3","Type":"ContainerStarted","Data":"5230f5a06017969bd201f7aac98cf3d5009f6f4b3f958e378ab3d41b4d2154c8"} Nov 25 
16:06:45 crc kubenswrapper[4879]: I1125 16:06:45.897862 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3f0a-account-create-gj9n8" event={"ID":"420b294c-2281-48eb-9339-d5fbe28c57b3","Type":"ContainerStarted","Data":"74834d5082fec1521fdd1354e507572781ca62826f081c0f089e114e4bb678d6"} Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.059452 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7345-account-create-csxxd"] Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.082865 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fdl5v"] Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.092929 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fdl5v"] Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.102612 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7345-account-create-csxxd"] Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.202229 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.363943 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts\") pod \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.364488 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9msl7\" (UniqueName: \"kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7\") pod \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\" (UID: \"e841bc3b-e1b7-4c00-b207-f4f66c225be9\") " Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.364660 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e841bc3b-e1b7-4c00-b207-f4f66c225be9" (UID: "e841bc3b-e1b7-4c00-b207-f4f66c225be9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.364989 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e841bc3b-e1b7-4c00-b207-f4f66c225be9-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.374036 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7" (OuterVolumeSpecName: "kube-api-access-9msl7") pod "e841bc3b-e1b7-4c00-b207-f4f66c225be9" (UID: "e841bc3b-e1b7-4c00-b207-f4f66c225be9"). InnerVolumeSpecName "kube-api-access-9msl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.466924 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9msl7\" (UniqueName: \"kubernetes.io/projected/e841bc3b-e1b7-4c00-b207-f4f66c225be9-kube-api-access-9msl7\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.910531 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-hjvjm" event={"ID":"e841bc3b-e1b7-4c00-b207-f4f66c225be9","Type":"ContainerDied","Data":"e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e"} Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.910574 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8cf6187f6c44b482835359e12326fa0726d7a2147ba5f33a565e71a6243030e" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.910587 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-hjvjm" Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.912134 4879 generic.go:334] "Generic (PLEG): container finished" podID="420b294c-2281-48eb-9339-d5fbe28c57b3" containerID="5230f5a06017969bd201f7aac98cf3d5009f6f4b3f958e378ab3d41b4d2154c8" exitCode=0 Nov 25 16:06:46 crc kubenswrapper[4879]: I1125 16:06:46.912152 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3f0a-account-create-gj9n8" event={"ID":"420b294c-2281-48eb-9339-d5fbe28c57b3","Type":"ContainerDied","Data":"5230f5a06017969bd201f7aac98cf3d5009f6f4b3f958e378ab3d41b4d2154c8"} Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.243667 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.383303 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq2fn\" (UniqueName: \"kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn\") pod \"420b294c-2281-48eb-9339-d5fbe28c57b3\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.383461 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts\") pod \"420b294c-2281-48eb-9339-d5fbe28c57b3\" (UID: \"420b294c-2281-48eb-9339-d5fbe28c57b3\") " Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.384284 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "420b294c-2281-48eb-9339-d5fbe28c57b3" (UID: "420b294c-2281-48eb-9339-d5fbe28c57b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.391710 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn" (OuterVolumeSpecName: "kube-api-access-pq2fn") pod "420b294c-2281-48eb-9339-d5fbe28c57b3" (UID: "420b294c-2281-48eb-9339-d5fbe28c57b3"). InnerVolumeSpecName "kube-api-access-pq2fn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.485517 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq2fn\" (UniqueName: \"kubernetes.io/projected/420b294c-2281-48eb-9339-d5fbe28c57b3-kube-api-access-pq2fn\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.485552 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/420b294c-2281-48eb-9339-d5fbe28c57b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.660334 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04b41c50-a8bf-4159-9632-7c84953367b9" path="/var/lib/kubelet/pods/04b41c50-a8bf-4159-9632-7c84953367b9/volumes" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.660944 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4" path="/var/lib/kubelet/pods/ac980b0e-5d92-48b7-a8cb-9bd4b490b9f4/volumes" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.922376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3f0a-account-create-gj9n8" event={"ID":"420b294c-2281-48eb-9339-d5fbe28c57b3","Type":"ContainerDied","Data":"74834d5082fec1521fdd1354e507572781ca62826f081c0f089e114e4bb678d6"} Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.922649 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74834d5082fec1521fdd1354e507572781ca62826f081c0f089e114e4bb678d6" Nov 25 16:06:47 crc kubenswrapper[4879]: I1125 16:06:47.922428 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3f0a-account-create-gj9n8" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.541485 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-8fh2s"] Nov 25 16:06:49 crc kubenswrapper[4879]: E1125 16:06:49.541949 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420b294c-2281-48eb-9339-d5fbe28c57b3" containerName="mariadb-account-create" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.541968 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="420b294c-2281-48eb-9339-d5fbe28c57b3" containerName="mariadb-account-create" Nov 25 16:06:49 crc kubenswrapper[4879]: E1125 16:06:49.541999 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e841bc3b-e1b7-4c00-b207-f4f66c225be9" containerName="mariadb-database-create" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.542007 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e841bc3b-e1b7-4c00-b207-f4f66c225be9" containerName="mariadb-database-create" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.542264 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="420b294c-2281-48eb-9339-d5fbe28c57b3" containerName="mariadb-account-create" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.542315 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e841bc3b-e1b7-4c00-b207-f4f66c225be9" containerName="mariadb-database-create" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.543057 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.551100 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-8fh2s"] Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.623537 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.623640 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9pj9\" (UniqueName: \"kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.726482 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.726625 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9pj9\" (UniqueName: \"kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.727490 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.743542 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9pj9\" (UniqueName: \"kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9\") pod \"octavia-persistence-db-create-8fh2s\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:49 crc kubenswrapper[4879]: I1125 16:06:49.872969 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.221721 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-c616-account-create-h586d"] Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.222902 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.226953 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.236272 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c616-account-create-h586d"] Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.319801 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-8fh2s"] Nov 25 16:06:50 crc kubenswrapper[4879]: W1125 16:06:50.334644 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43b99a4e_a3e4_4129_816c_e139e5806ec1.slice/crio-f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb WatchSource:0}: Error finding container f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb: Status 404 returned error can't find the container with id f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.337244 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.337389 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7hj5\" (UniqueName: \"kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.439959 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.440058 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7hj5\" (UniqueName: \"kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.440821 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.468921 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7hj5\" (UniqueName: \"kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5\") pod \"octavia-c616-account-create-h586d\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " 
pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.543700 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.839481 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-c616-account-create-h586d"] Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.954574 4879 generic.go:334] "Generic (PLEG): container finished" podID="43b99a4e-a3e4-4129-816c-e139e5806ec1" containerID="4e340da432b6a6de1251e61d3036fdee65596bb25210a4c73038b49be4590453" exitCode=0 Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.954650 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-8fh2s" event={"ID":"43b99a4e-a3e4-4129-816c-e139e5806ec1","Type":"ContainerDied","Data":"4e340da432b6a6de1251e61d3036fdee65596bb25210a4c73038b49be4590453"} Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.954679 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-8fh2s" event={"ID":"43b99a4e-a3e4-4129-816c-e139e5806ec1","Type":"ContainerStarted","Data":"f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb"} Nov 25 16:06:50 crc kubenswrapper[4879]: I1125 16:06:50.959469 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c616-account-create-h586d" event={"ID":"d347fc00-931a-40cf-83ca-07761d0a217c","Type":"ContainerStarted","Data":"08e02b18e23f8506eedd477877dbf4281e4fc11e2dc4ca4e9d30d16035a8fa3a"} Nov 25 16:06:51 crc kubenswrapper[4879]: I1125 16:06:51.969915 4879 generic.go:334] "Generic (PLEG): container finished" podID="d347fc00-931a-40cf-83ca-07761d0a217c" containerID="ebf17fa06c11020ad972f12b814444b371a00d03f9ef458bd1d4bdfa5aea08c0" exitCode=0 Nov 25 16:06:51 crc kubenswrapper[4879]: I1125 16:06:51.969970 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c616-account-create-h586d" event={"ID":"d347fc00-931a-40cf-83ca-07761d0a217c","Type":"ContainerDied","Data":"ebf17fa06c11020ad972f12b814444b371a00d03f9ef458bd1d4bdfa5aea08c0"} Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.031032 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4pllz"] Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.042437 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4pllz"] Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.309229 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.478726 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts\") pod \"43b99a4e-a3e4-4129-816c-e139e5806ec1\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.479208 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43b99a4e-a3e4-4129-816c-e139e5806ec1" (UID: "43b99a4e-a3e4-4129-816c-e139e5806ec1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.479333 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9pj9\" (UniqueName: \"kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9\") pod \"43b99a4e-a3e4-4129-816c-e139e5806ec1\" (UID: \"43b99a4e-a3e4-4129-816c-e139e5806ec1\") " Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.480083 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43b99a4e-a3e4-4129-816c-e139e5806ec1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.488376 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9" (OuterVolumeSpecName: "kube-api-access-r9pj9") pod "43b99a4e-a3e4-4129-816c-e139e5806ec1" (UID: "43b99a4e-a3e4-4129-816c-e139e5806ec1"). InnerVolumeSpecName "kube-api-access-r9pj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.581759 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9pj9\" (UniqueName: \"kubernetes.io/projected/43b99a4e-a3e4-4129-816c-e139e5806ec1-kube-api-access-r9pj9\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.983458 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-8fh2s" event={"ID":"43b99a4e-a3e4-4129-816c-e139e5806ec1","Type":"ContainerDied","Data":"f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb"} Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.983507 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9dad97bd45ad7ef66387755b7735fbb6d8301cce20983c4071a315a3c51adcb" Nov 25 16:06:52 crc kubenswrapper[4879]: I1125 16:06:52.983544 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-8fh2s" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.297669 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.395921 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7hj5\" (UniqueName: \"kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5\") pod \"d347fc00-931a-40cf-83ca-07761d0a217c\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.396376 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts\") pod \"d347fc00-931a-40cf-83ca-07761d0a217c\" (UID: \"d347fc00-931a-40cf-83ca-07761d0a217c\") " Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.396809 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d347fc00-931a-40cf-83ca-07761d0a217c" (UID: "d347fc00-931a-40cf-83ca-07761d0a217c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.397226 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d347fc00-931a-40cf-83ca-07761d0a217c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.400385 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5" (OuterVolumeSpecName: "kube-api-access-z7hj5") pod "d347fc00-931a-40cf-83ca-07761d0a217c" (UID: "d347fc00-931a-40cf-83ca-07761d0a217c"). InnerVolumeSpecName "kube-api-access-z7hj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.499466 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7hj5\" (UniqueName: \"kubernetes.io/projected/d347fc00-931a-40cf-83ca-07761d0a217c-kube-api-access-z7hj5\") on node \"crc\" DevicePath \"\"" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.655529 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aadf716-b59d-4ef9-9427-0ceb46ff8816" path="/var/lib/kubelet/pods/2aadf716-b59d-4ef9-9427-0ceb46ff8816/volumes" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.993358 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-c616-account-create-h586d" event={"ID":"d347fc00-931a-40cf-83ca-07761d0a217c","Type":"ContainerDied","Data":"08e02b18e23f8506eedd477877dbf4281e4fc11e2dc4ca4e9d30d16035a8fa3a"} Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.994103 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08e02b18e23f8506eedd477877dbf4281e4fc11e2dc4ca4e9d30d16035a8fa3a" Nov 25 16:06:53 crc kubenswrapper[4879]: I1125 16:06:53.993635 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-c616-account-create-h586d" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.273155 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-6df846d798-cksqh"] Nov 25 16:06:56 crc kubenswrapper[4879]: E1125 16:06:56.273898 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43b99a4e-a3e4-4129-816c-e139e5806ec1" containerName="mariadb-database-create" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.273909 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="43b99a4e-a3e4-4129-816c-e139e5806ec1" containerName="mariadb-database-create" Nov 25 16:06:56 crc kubenswrapper[4879]: E1125 16:06:56.273940 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d347fc00-931a-40cf-83ca-07761d0a217c" containerName="mariadb-account-create" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.273947 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d347fc00-931a-40cf-83ca-07761d0a217c" containerName="mariadb-account-create" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.274111 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="43b99a4e-a3e4-4129-816c-e139e5806ec1" containerName="mariadb-database-create" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.274148 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d347fc00-931a-40cf-83ca-07761d0a217c" containerName="mariadb-account-create" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.288889 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6df846d798-cksqh"] Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.289017 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.294114 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.294146 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.294306 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-rg7m7" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.455350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-combined-ca-bundle\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.455769 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.455794 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-scripts\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 
16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.455828 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-octavia-run\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.456091 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data-merged\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.557718 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data-merged\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.557784 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-combined-ca-bundle\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.557866 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.557889 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-scripts\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.557923 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-octavia-run\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.558868 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-octavia-run\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.558985 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data-merged\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 
16:06:56.564110 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-combined-ca-bundle\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.564266 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-config-data\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.567855 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e906050-a1ea-4637-9c1d-5237d58fad24-scripts\") pod \"octavia-api-6df846d798-cksqh\" (UID: \"7e906050-a1ea-4637-9c1d-5237d58fad24\") " pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:56 crc kubenswrapper[4879]: I1125 16:06:56.611647 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:06:57 crc kubenswrapper[4879]: I1125 16:06:57.138092 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-6df846d798-cksqh"] Nov 25 16:06:58 crc kubenswrapper[4879]: I1125 16:06:58.043857 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6df846d798-cksqh" event={"ID":"7e906050-a1ea-4637-9c1d-5237d58fad24","Type":"ContainerStarted","Data":"2354d77a07a88799cd0a0be0d440a8382b79bea3234e0f2d913d3cc3f8df7446"} Nov 25 16:07:05 crc kubenswrapper[4879]: I1125 16:07:05.114468 4879 generic.go:334] "Generic (PLEG): container finished" podID="7e906050-a1ea-4637-9c1d-5237d58fad24" containerID="9110b145dff06bdaed3f4d6e478177effe8f7ce6bd54d9a6364ab3ebdafbafec" exitCode=0 Nov 25 16:07:05 crc kubenswrapper[4879]: I1125 16:07:05.114565 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6df846d798-cksqh" event={"ID":"7e906050-a1ea-4637-9c1d-5237d58fad24","Type":"ContainerDied","Data":"9110b145dff06bdaed3f4d6e478177effe8f7ce6bd54d9a6364ab3ebdafbafec"} Nov 25 16:07:06 crc kubenswrapper[4879]: I1125 16:07:06.132116 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6df846d798-cksqh" event={"ID":"7e906050-a1ea-4637-9c1d-5237d58fad24","Type":"ContainerStarted","Data":"ea42eed487e7b0c10090ec41a9d42d6a54ec575de4348111e447271deaeb57b2"} Nov 25 16:07:06 crc kubenswrapper[4879]: I1125 16:07:06.132722 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-6df846d798-cksqh" event={"ID":"7e906050-a1ea-4637-9c1d-5237d58fad24","Type":"ContainerStarted","Data":"a5b3f1b91d37ac9b5644178f87d0a8bf97625d5559b7db6c4a85b12ecbe691cf"} Nov 25 16:07:06 crc kubenswrapper[4879]: I1125 16:07:06.132818 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:07:06 crc kubenswrapper[4879]: I1125 16:07:06.133374 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:07:06 crc kubenswrapper[4879]: I1125 16:07:06.178257 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-6df846d798-cksqh" podStartSLOduration=2.720458633 podStartE2EDuration="10.178223934s" 
podCreationTimestamp="2025-11-25 16:06:56 +0000 UTC" firstStartedPulling="2025-11-25 16:06:57.142666056 +0000 UTC m=+6108.746079127" lastFinishedPulling="2025-11-25 16:07:04.600431357 +0000 UTC m=+6116.203844428" observedRunningTime="2025-11-25 16:07:06.164697847 +0000 UTC m=+6117.768110948" watchObservedRunningTime="2025-11-25 16:07:06.178223934 +0000 UTC m=+6117.781637005" Nov 25 16:07:07 crc kubenswrapper[4879]: I1125 16:07:07.039565 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ctt6t"] Nov 25 16:07:07 crc kubenswrapper[4879]: I1125 16:07:07.051625 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ctt6t"] Nov 25 16:07:07 crc kubenswrapper[4879]: I1125 16:07:07.670467 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b74a6428-86dc-4239-8792-65fcd245f5ce" path="/var/lib/kubelet/pods/b74a6428-86dc-4239-8792-65fcd245f5ce/volumes" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:15.572881 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:15.609837 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-6df846d798-cksqh" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.294057 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-r78t7" podUID="33ca0631-dd7b-4592-a04c-ea75322c7323" containerName="ovn-controller" probeResult="failure" output=< Nov 25 16:07:16 crc kubenswrapper[4879]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 25 16:07:16 crc kubenswrapper[4879]: > Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.414244 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.474023 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-nvsvv" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.585712 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-r78t7-config-r4794"] Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.588645 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.592337 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.600693 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-r78t7-config-r4794"] Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.720322 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.720469 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.721044 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.721132 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.721233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.721330 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsjb8\" (UniqueName: \"kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824031 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824197 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts\") pod 
\"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824340 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824361 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824401 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824458 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsjb8\" (UniqueName: \"kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.824646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.825025 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.825510 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.826192 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.827007 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts\") pod 
\"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.848647 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsjb8\" (UniqueName: \"kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8\") pod \"ovn-controller-r78t7-config-r4794\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:16 crc kubenswrapper[4879]: I1125 16:07:16.912130 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:17 crc kubenswrapper[4879]: I1125 16:07:17.412536 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-r78t7-config-r4794"] Nov 25 16:07:17 crc kubenswrapper[4879]: I1125 16:07:17.428046 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-r78t7-config-r4794" event={"ID":"865208f0-f193-41b0-9cf3-045fcb640b26","Type":"ContainerStarted","Data":"99f38a305b3f6ca7d707f52397fe6e37ba7ae159c72f56439ded7ee8ef71e7c6"} Nov 25 16:07:18 crc kubenswrapper[4879]: I1125 16:07:18.444799 4879 generic.go:334] "Generic (PLEG): container finished" podID="865208f0-f193-41b0-9cf3-045fcb640b26" containerID="ab7f8ba18c86ea1eef8301e95c54db547483da2025edc51f07d1590860e1cbd3" exitCode=0 Nov 25 16:07:18 crc kubenswrapper[4879]: I1125 16:07:18.445171 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-r78t7-config-r4794" event={"ID":"865208f0-f193-41b0-9cf3-045fcb640b26","Type":"ContainerDied","Data":"ab7f8ba18c86ea1eef8301e95c54db547483da2025edc51f07d1590860e1cbd3"} Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.797249 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.885771 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886173 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsjb8\" (UniqueName: \"kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886288 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.885997 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886466 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886548 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886623 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886720 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run\") pod \"865208f0-f193-41b0-9cf3-045fcb640b26\" (UID: \"865208f0-f193-41b0-9cf3-045fcb640b26\") " Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.886763 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run" (OuterVolumeSpecName: "var-run") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887071 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887318 4879 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887393 4879 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887454 4879 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887514 4879 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/865208f0-f193-41b0-9cf3-045fcb640b26-var-run\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.887532 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts" (OuterVolumeSpecName: "scripts") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.896253 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8" (OuterVolumeSpecName: "kube-api-access-lsjb8") pod "865208f0-f193-41b0-9cf3-045fcb640b26" (UID: "865208f0-f193-41b0-9cf3-045fcb640b26"). InnerVolumeSpecName "kube-api-access-lsjb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.989312 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsjb8\" (UniqueName: \"kubernetes.io/projected/865208f0-f193-41b0-9cf3-045fcb640b26-kube-api-access-lsjb8\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:19 crc kubenswrapper[4879]: I1125 16:07:19.989352 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/865208f0-f193-41b0-9cf3-045fcb640b26-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:20 crc kubenswrapper[4879]: I1125 16:07:20.473470 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-r78t7-config-r4794" event={"ID":"865208f0-f193-41b0-9cf3-045fcb640b26","Type":"ContainerDied","Data":"99f38a305b3f6ca7d707f52397fe6e37ba7ae159c72f56439ded7ee8ef71e7c6"} Nov 25 16:07:20 crc kubenswrapper[4879]: I1125 16:07:20.473516 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99f38a305b3f6ca7d707f52397fe6e37ba7ae159c72f56439ded7ee8ef71e7c6" Nov 25 16:07:20 crc kubenswrapper[4879]: I1125 16:07:20.473588 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-r78t7-config-r4794" Nov 25 16:07:20 crc kubenswrapper[4879]: I1125 16:07:20.882909 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-r78t7-config-r4794"] Nov 25 16:07:20 crc kubenswrapper[4879]: I1125 16:07:20.892422 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-r78t7-config-r4794"] Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.159774 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-rq4tk"] Nov 25 16:07:21 crc kubenswrapper[4879]: E1125 16:07:21.161022 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="865208f0-f193-41b0-9cf3-045fcb640b26" containerName="ovn-config" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.161074 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="865208f0-f193-41b0-9cf3-045fcb640b26" containerName="ovn-config" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.161530 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="865208f0-f193-41b0-9cf3-045fcb640b26" containerName="ovn-config" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.162818 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.164702 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.164984 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.167759 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.176384 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-rq4tk"] Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.247026 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-r78t7" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.314395 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fa7912a7-1d8d-437c-9611-2e2474e48f80-hm-ports\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.314456 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-scripts\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.314491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data-merged\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.314953 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.416683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fa7912a7-1d8d-437c-9611-2e2474e48f80-hm-ports\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.416734 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-scripts\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.416757 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data-merged\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.416944 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.417383 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data-merged\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.417766 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fa7912a7-1d8d-437c-9611-2e2474e48f80-hm-ports\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.422408 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-config-data\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.427704 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa7912a7-1d8d-437c-9611-2e2474e48f80-scripts\") pod \"octavia-rsyslog-rq4tk\" (UID: \"fa7912a7-1d8d-437c-9611-2e2474e48f80\") " pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.506659 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.661051 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="865208f0-f193-41b0-9cf3-045fcb640b26" path="/var/lib/kubelet/pods/865208f0-f193-41b0-9cf3-045fcb640b26/volumes" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.744920 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.746718 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.750489 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.760909 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.824386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.824420 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.926410 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.926470 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.927237 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:21 crc kubenswrapper[4879]: I1125 16:07:21.931546 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config\") pod \"octavia-image-upload-59f8cff499-s662j\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:22 crc kubenswrapper[4879]: W1125 16:07:22.042364 4879 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa7912a7_1d8d_437c_9611_2e2474e48f80.slice/crio-2b924927005fc60e39f8335a1ea04f319aac995293607f0e02ef6bc4611c0123 WatchSource:0}: Error finding container 2b924927005fc60e39f8335a1ea04f319aac995293607f0e02ef6bc4611c0123: Status 404 returned error can't find the container with id 2b924927005fc60e39f8335a1ea04f319aac995293607f0e02ef6bc4611c0123 Nov 25 16:07:22 crc kubenswrapper[4879]: I1125 16:07:22.042947 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-rq4tk"] Nov 25 16:07:22 crc kubenswrapper[4879]: I1125 16:07:22.072401 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:07:22 crc kubenswrapper[4879]: I1125 16:07:22.500325 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-rq4tk" event={"ID":"fa7912a7-1d8d-437c-9611-2e2474e48f80","Type":"ContainerStarted","Data":"2b924927005fc60e39f8335a1ea04f319aac995293607f0e02ef6bc4611c0123"} Nov 25 16:07:22 crc kubenswrapper[4879]: I1125 16:07:22.506485 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:07:22 crc kubenswrapper[4879]: W1125 16:07:22.514168 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e66130_c6cf_4d1a_948b_bdde934893e8.slice/crio-ade5d184c5558254888be403c37cc9dbcb76a4dfed5eec1fa36f3be3529ad2e3 WatchSource:0}: Error finding container ade5d184c5558254888be403c37cc9dbcb76a4dfed5eec1fa36f3be3529ad2e3: Status 404 returned error can't find the container with id ade5d184c5558254888be403c37cc9dbcb76a4dfed5eec1fa36f3be3529ad2e3 Nov 25 16:07:23 crc kubenswrapper[4879]: I1125 16:07:23.510805 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerStarted","Data":"ade5d184c5558254888be403c37cc9dbcb76a4dfed5eec1fa36f3be3529ad2e3"} Nov 25 16:07:24 crc kubenswrapper[4879]: I1125 16:07:24.524303 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-rq4tk" event={"ID":"fa7912a7-1d8d-437c-9611-2e2474e48f80","Type":"ContainerStarted","Data":"8b853f582a58ddc1479b60c881c8570e6db8a334f245120ba7487befac695632"} Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.928426 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-pcrl2"] Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.930438 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.933214 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.933292 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.933610 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Nov 25 16:07:25 crc kubenswrapper[4879]: I1125 16:07:25.942044 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-pcrl2"] Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010296 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f8f40c8-3493-4c88-be75-78de60f1d094-hm-ports\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010401 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data-merged\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010459 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-scripts\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010514 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-amphora-certs\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.010535 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-combined-ca-bundle\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.112787 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc 
kubenswrapper[4879]: I1125 16:07:26.112856 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f8f40c8-3493-4c88-be75-78de60f1d094-hm-ports\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.112917 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data-merged\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.113000 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-scripts\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.113057 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-amphora-certs\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.113077 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-combined-ca-bundle\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.123478 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data-merged\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.124519 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7f8f40c8-3493-4c88-be75-78de60f1d094-hm-ports\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.134328 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-r72bb"] Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.135610 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-config-data\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.136192 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-combined-ca-bundle\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " 
pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.136376 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-scripts\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.136616 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.138048 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7f8f40c8-3493-4c88-be75-78de60f1d094-amphora-certs\") pod \"octavia-healthmanager-pcrl2\" (UID: \"7f8f40c8-3493-4c88-be75-78de60f1d094\") " pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.144058 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.151929 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-r72bb"] Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.216193 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.216263 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.216341 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.216436 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.261068 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.318196 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.318276 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.318303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.318361 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.318958 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.324887 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.325283 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.329612 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data\") pod \"octavia-db-sync-r72bb\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.550516 4879 generic.go:334] "Generic (PLEG): container finished" podID="fa7912a7-1d8d-437c-9611-2e2474e48f80" containerID="8b853f582a58ddc1479b60c881c8570e6db8a334f245120ba7487befac695632" exitCode=0 Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.550598 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-rq4tk" 
event={"ID":"fa7912a7-1d8d-437c-9611-2e2474e48f80","Type":"ContainerDied","Data":"8b853f582a58ddc1479b60c881c8570e6db8a334f245120ba7487befac695632"} Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.572527 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:26 crc kubenswrapper[4879]: I1125 16:07:26.910181 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-pcrl2"] Nov 25 16:07:26 crc kubenswrapper[4879]: W1125 16:07:26.917340 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f8f40c8_3493_4c88_be75_78de60f1d094.slice/crio-381660e17fc73662c19505cac5102bad35c90253fed71c75928e268e68011d51 WatchSource:0}: Error finding container 381660e17fc73662c19505cac5102bad35c90253fed71c75928e268e68011d51: Status 404 returned error can't find the container with id 381660e17fc73662c19505cac5102bad35c90253fed71c75928e268e68011d51 Nov 25 16:07:27 crc kubenswrapper[4879]: I1125 16:07:27.085076 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-r72bb"] Nov 25 16:07:27 crc kubenswrapper[4879]: W1125 16:07:27.085637 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48e59a94_f34e_428a_8695_2f005b4804c5.slice/crio-a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0 WatchSource:0}: Error finding container a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0: Status 404 returned error can't find the container with id a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0 Nov 25 16:07:27 crc kubenswrapper[4879]: I1125 16:07:27.561304 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-pcrl2" event={"ID":"7f8f40c8-3493-4c88-be75-78de60f1d094","Type":"ContainerStarted","Data":"381660e17fc73662c19505cac5102bad35c90253fed71c75928e268e68011d51"} Nov 25 16:07:27 crc kubenswrapper[4879]: I1125 16:07:27.563748 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-r72bb" event={"ID":"48e59a94-f34e-428a-8695-2f005b4804c5","Type":"ContainerStarted","Data":"a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0"} Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.006271 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-qqlr2"] Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.009633 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.014203 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.014499 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.021041 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-qqlr2"] Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.063918 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.063970 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data-merged\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.063993 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-combined-ca-bundle\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.064012 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-hm-ports\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.064057 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-amphora-certs\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.064194 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-scripts\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166313 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-amphora-certs\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166418 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-scripts\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166538 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166570 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data-merged\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166600 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-combined-ca-bundle\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.166619 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-hm-ports\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.168146 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-hm-ports\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.169408 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data-merged\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.176539 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-config-data\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.176543 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-scripts\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.177224 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-amphora-certs\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " 
pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.182295 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fe2c7fb-209d-4890-8cdf-613c5b44fc47-combined-ca-bundle\") pod \"octavia-housekeeping-qqlr2\" (UID: \"6fe2c7fb-209d-4890-8cdf-613c5b44fc47\") " pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.336840 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.575591 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-pcrl2" event={"ID":"7f8f40c8-3493-4c88-be75-78de60f1d094","Type":"ContainerStarted","Data":"33eb999c8f3b64baa92a0b7cd93cc8d6564d3671b92c4dcb7be5e163e2165f92"} Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.581529 4879 generic.go:334] "Generic (PLEG): container finished" podID="48e59a94-f34e-428a-8695-2f005b4804c5" containerID="764eb2d23aecc1a91db7cce42e187c1a4b08d1104a56887f6c449ca5c4f3c924" exitCode=0 Nov 25 16:07:28 crc kubenswrapper[4879]: I1125 16:07:28.581572 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-r72bb" event={"ID":"48e59a94-f34e-428a-8695-2f005b4804c5","Type":"ContainerDied","Data":"764eb2d23aecc1a91db7cce42e187c1a4b08d1104a56887f6c449ca5c4f3c924"} Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.516984 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-6tbg6"] Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.519309 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.521684 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.521853 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.536511 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-6tbg6"] Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.600457 4879 generic.go:334] "Generic (PLEG): container finished" podID="7f8f40c8-3493-4c88-be75-78de60f1d094" containerID="33eb999c8f3b64baa92a0b7cd93cc8d6564d3671b92c4dcb7be5e163e2165f92" exitCode=0 Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.600540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-pcrl2" event={"ID":"7f8f40c8-3493-4c88-be75-78de60f1d094","Type":"ContainerDied","Data":"33eb999c8f3b64baa92a0b7cd93cc8d6564d3671b92c4dcb7be5e163e2165f92"} Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620369 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620441 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-scripts\") pod \"octavia-worker-6tbg6\" (UID: 
\"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620470 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-combined-ca-bundle\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620653 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-amphora-certs\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620809 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-hm-ports\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.620888 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data-merged\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723496 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723631 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-scripts\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723684 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-combined-ca-bundle\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723747 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-amphora-certs\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723808 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-hm-ports\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.723848 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data-merged\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.725464 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data-merged\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.725646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-hm-ports\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.729881 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-scripts\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.731202 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-combined-ca-bundle\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.731370 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-config-data\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.732856 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9-amphora-certs\") pod \"octavia-worker-6tbg6\" (UID: \"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9\") " pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:30 crc kubenswrapper[4879]: I1125 16:07:30.846386 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.615733 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-qqlr2"] Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.642367 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerStarted","Data":"654f6d1e9bcf7c02908454133a1ea2cb60f2b0b84e4cd454d00003c667884cef"} Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.654114 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-r72bb" event={"ID":"48e59a94-f34e-428a-8695-2f005b4804c5","Type":"ContainerStarted","Data":"116ee236e2a0d82331df17a180456d133adb4bc22bb44c1bef5209cb1eb5c672"} Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.661201 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-rq4tk" event={"ID":"fa7912a7-1d8d-437c-9611-2e2474e48f80","Type":"ContainerStarted","Data":"5deea37f77eabb78e31bd9ddd64d832b346e27dfd064555a265f91557e2ae997"} Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.661756 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.693077 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-rq4tk" podStartSLOduration=1.705955916 podStartE2EDuration="11.693057697s" podCreationTimestamp="2025-11-25 16:07:21 +0000 UTC" firstStartedPulling="2025-11-25 16:07:22.04498652 +0000 UTC m=+6133.648399591" lastFinishedPulling="2025-11-25 16:07:32.032088301 +0000 UTC m=+6143.635501372" observedRunningTime="2025-11-25 16:07:32.692698458 +0000 UTC m=+6144.296111529" watchObservedRunningTime="2025-11-25 16:07:32.693057697 +0000 UTC m=+6144.296470768" Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.726540 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-r72bb" podStartSLOduration=6.726514195 podStartE2EDuration="6.726514195s" podCreationTimestamp="2025-11-25 16:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:07:32.716696549 +0000 UTC m=+6144.320109630" watchObservedRunningTime="2025-11-25 16:07:32.726514195 +0000 UTC m=+6144.329927266" Nov 25 16:07:32 crc kubenswrapper[4879]: W1125 16:07:32.754901 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85ca893f_d4cb_4ee3_80d6_6d6b5b6804d9.slice/crio-bf7aea113a06c1f8476bb139180abb668bbce2accb70130eac7840e998fbf064 WatchSource:0}: Error finding container bf7aea113a06c1f8476bb139180abb668bbce2accb70130eac7840e998fbf064: Status 404 returned error can't find the container with id bf7aea113a06c1f8476bb139180abb668bbce2accb70130eac7840e998fbf064 Nov 25 16:07:32 crc kubenswrapper[4879]: I1125 16:07:32.765551 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-6tbg6"] Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.674155 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qqlr2" event={"ID":"6fe2c7fb-209d-4890-8cdf-613c5b44fc47","Type":"ContainerStarted","Data":"41ec164abbf4654e8070b270ed67491d3cfa27dba0ac31d65e5dc6374c4e996c"} Nov 25 16:07:33 crc 
kubenswrapper[4879]: I1125 16:07:33.677070 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-pcrl2" event={"ID":"7f8f40c8-3493-4c88-be75-78de60f1d094","Type":"ContainerStarted","Data":"bd4b65f659e5470e6616463eddbf2d948c820dc2036e2bc48943324c15e95f4e"} Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.677279 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.686280 4879 generic.go:334] "Generic (PLEG): container finished" podID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerID="654f6d1e9bcf7c02908454133a1ea2cb60f2b0b84e4cd454d00003c667884cef" exitCode=0 Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.686344 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerDied","Data":"654f6d1e9bcf7c02908454133a1ea2cb60f2b0b84e4cd454d00003c667884cef"} Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.689810 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6tbg6" event={"ID":"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9","Type":"ContainerStarted","Data":"bf7aea113a06c1f8476bb139180abb668bbce2accb70130eac7840e998fbf064"} Nov 25 16:07:33 crc kubenswrapper[4879]: I1125 16:07:33.699471 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-pcrl2" podStartSLOduration=8.699456098 podStartE2EDuration="8.699456098s" podCreationTimestamp="2025-11-25 16:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:07:33.695587793 +0000 UTC m=+6145.299000864" watchObservedRunningTime="2025-11-25 16:07:33.699456098 +0000 UTC m=+6145.302869169" Nov 25 16:07:35 crc kubenswrapper[4879]: I1125 16:07:35.716664 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6tbg6" event={"ID":"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9","Type":"ContainerStarted","Data":"bbce6001107172ac4efc67b8a6fd30bfbdf408a706142750e66e323ac9e6aa38"} Nov 25 16:07:35 crc kubenswrapper[4879]: I1125 16:07:35.719937 4879 generic.go:334] "Generic (PLEG): container finished" podID="6fe2c7fb-209d-4890-8cdf-613c5b44fc47" containerID="92512e9545d433b457e76829d1fc476e082520bd25a6b5e508307e03abd790c0" exitCode=0 Nov 25 16:07:35 crc kubenswrapper[4879]: I1125 16:07:35.720099 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qqlr2" event={"ID":"6fe2c7fb-209d-4890-8cdf-613c5b44fc47","Type":"ContainerDied","Data":"92512e9545d433b457e76829d1fc476e082520bd25a6b5e508307e03abd790c0"} Nov 25 16:07:35 crc kubenswrapper[4879]: I1125 16:07:35.737089 4879 generic.go:334] "Generic (PLEG): container finished" podID="48e59a94-f34e-428a-8695-2f005b4804c5" containerID="116ee236e2a0d82331df17a180456d133adb4bc22bb44c1bef5209cb1eb5c672" exitCode=0 Nov 25 16:07:35 crc kubenswrapper[4879]: I1125 16:07:35.737168 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-r72bb" event={"ID":"48e59a94-f34e-428a-8695-2f005b4804c5","Type":"ContainerDied","Data":"116ee236e2a0d82331df17a180456d133adb4bc22bb44c1bef5209cb1eb5c672"} Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.751228 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" 
event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerStarted","Data":"d337762861fd82c3e1576369c1b58967bfcec3e5ed4d7a41d9049f2a17b6945b"} Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.752788 4879 generic.go:334] "Generic (PLEG): container finished" podID="85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9" containerID="bbce6001107172ac4efc67b8a6fd30bfbdf408a706142750e66e323ac9e6aa38" exitCode=0 Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.752921 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6tbg6" event={"ID":"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9","Type":"ContainerDied","Data":"bbce6001107172ac4efc67b8a6fd30bfbdf408a706142750e66e323ac9e6aa38"} Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.756012 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-qqlr2" event={"ID":"6fe2c7fb-209d-4890-8cdf-613c5b44fc47","Type":"ContainerStarted","Data":"ba8819cc60b17f94b5ffdb1e9fcd44346f21d149f0b525e7ee69335e7d51c054"} Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.842700 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-qqlr2" podStartSLOduration=8.022530513 podStartE2EDuration="9.842678756s" podCreationTimestamp="2025-11-25 16:07:27 +0000 UTC" firstStartedPulling="2025-11-25 16:07:32.692090512 +0000 UTC m=+6144.295503583" lastFinishedPulling="2025-11-25 16:07:34.512238755 +0000 UTC m=+6146.115651826" observedRunningTime="2025-11-25 16:07:36.799094163 +0000 UTC m=+6148.402507254" watchObservedRunningTime="2025-11-25 16:07:36.842678756 +0000 UTC m=+6148.446091827" Nov 25 16:07:36 crc kubenswrapper[4879]: I1125 16:07:36.847275 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-s662j" podStartSLOduration=2.360679203 podStartE2EDuration="15.847253839s" podCreationTimestamp="2025-11-25 16:07:21 +0000 UTC" firstStartedPulling="2025-11-25 16:07:22.516109745 +0000 UTC m=+6134.119522816" lastFinishedPulling="2025-11-25 16:07:36.002684331 +0000 UTC m=+6147.606097452" observedRunningTime="2025-11-25 16:07:36.771963807 +0000 UTC m=+6148.375376878" watchObservedRunningTime="2025-11-25 16:07:36.847253839 +0000 UTC m=+6148.450666910" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.279158 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.371322 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data\") pod \"48e59a94-f34e-428a-8695-2f005b4804c5\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.371427 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts\") pod \"48e59a94-f34e-428a-8695-2f005b4804c5\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.371470 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged\") pod \"48e59a94-f34e-428a-8695-2f005b4804c5\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.371667 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle\") pod \"48e59a94-f34e-428a-8695-2f005b4804c5\" (UID: \"48e59a94-f34e-428a-8695-2f005b4804c5\") " Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.382582 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data" (OuterVolumeSpecName: "config-data") pod "48e59a94-f34e-428a-8695-2f005b4804c5" (UID: "48e59a94-f34e-428a-8695-2f005b4804c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.382679 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts" (OuterVolumeSpecName: "scripts") pod "48e59a94-f34e-428a-8695-2f005b4804c5" (UID: "48e59a94-f34e-428a-8695-2f005b4804c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.401813 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48e59a94-f34e-428a-8695-2f005b4804c5" (UID: "48e59a94-f34e-428a-8695-2f005b4804c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.403357 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "48e59a94-f34e-428a-8695-2f005b4804c5" (UID: "48e59a94-f34e-428a-8695-2f005b4804c5"). InnerVolumeSpecName "config-data-merged". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.474050 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.474086 4879 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/48e59a94-f34e-428a-8695-2f005b4804c5-config-data-merged\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.474098 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.474106 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e59a94-f34e-428a-8695-2f005b4804c5-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.766671 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-r72bb" event={"ID":"48e59a94-f34e-428a-8695-2f005b4804c5","Type":"ContainerDied","Data":"a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0"} Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.766980 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a112bf07b209aac42bd486324f1f54c8e22e38fabf0f322b24fad7754f7167f0" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.766686 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-r72bb" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.768847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6tbg6" event={"ID":"85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9","Type":"ContainerStarted","Data":"d161c25ccc6b1d5c5c6a67b0626d723465acf348a974e4fead6a06caf3759941"} Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.769407 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:37 crc kubenswrapper[4879]: I1125 16:07:37.798227 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-6tbg6" podStartSLOduration=6.0416743 podStartE2EDuration="7.798201696s" podCreationTimestamp="2025-11-25 16:07:30 +0000 UTC" firstStartedPulling="2025-11-25 16:07:32.759341507 +0000 UTC m=+6144.362754578" lastFinishedPulling="2025-11-25 16:07:34.515868903 +0000 UTC m=+6146.119281974" observedRunningTime="2025-11-25 16:07:37.789276633 +0000 UTC m=+6149.392689724" watchObservedRunningTime="2025-11-25 16:07:37.798201696 +0000 UTC m=+6149.401614767" Nov 25 16:07:38 crc kubenswrapper[4879]: I1125 16:07:38.779477 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:41 crc kubenswrapper[4879]: I1125 16:07:41.294588 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-pcrl2" Nov 25 16:07:43 crc kubenswrapper[4879]: I1125 16:07:43.383519 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-qqlr2" Nov 25 16:07:44 crc kubenswrapper[4879]: I1125 16:07:44.334497 4879 scope.go:117] "RemoveContainer" 
containerID="2e40c443f5be037996babfe2d680f7326ec514420ffdcd94c1458aea3a410bb2" Nov 25 16:07:45 crc kubenswrapper[4879]: I1125 16:07:45.878159 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-6tbg6" Nov 25 16:07:49 crc kubenswrapper[4879]: I1125 16:07:49.564314 4879 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 5.431617177s: [/var/lib/containers/storage/overlay/87f802fab7d8bc4a1cc0c97d60773be7cba3e0350eb2bb5fdd6b63a615ecc1f5/diff /var/log/pods/openstack_ovn-controller-ovs-nvsvv_c7787cd4-dbb1-4a83-b79b-017ba868ad0f/ovsdb-server/0.log]; will not log again for this container unless duration exceeds 2s Nov 25 16:07:49 crc kubenswrapper[4879]: I1125 16:07:49.782189 4879 scope.go:117] "RemoveContainer" containerID="d6f58e8d4960308886fd6ea3590564482654153daabb97d2d516550fcacc929f" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.198437 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="58462743-d61e-44b0-bdfb-675330d8b5ad" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.99:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.198541 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-backup-0" podUID="94d9e6dc-e680-4e3c-a4d9-29e638f2e47e" containerName="cinder-backup" probeResult="failure" output="Get \"http://10.217.1.97:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.220750 4879 scope.go:117] "RemoveContainer" containerID="0309c0ad8552798355a722b2d9835bb9585aa73c6678510e08e2972d2022ba17" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.299844 4879 scope.go:117] "RemoveContainer" containerID="4a5af38f4fd63a331c2146c8889a6d2a137fda52def4f2148d6193ff143256fe" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.418380 4879 scope.go:117] "RemoveContainer" containerID="fecb88eb3e1d141b24f52f41031140e29f6776b08b9708e5fc1edb7dfc65c584" Nov 25 16:07:50 crc kubenswrapper[4879]: I1125 16:07:50.574566 4879 scope.go:117] "RemoveContainer" containerID="d63274a205b0f5e75c14303587038911c14d25b558145749dad291ea725caabe" Nov 25 16:07:51 crc kubenswrapper[4879]: I1125 16:07:51.541693 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-rq4tk" Nov 25 16:07:52 crc kubenswrapper[4879]: I1125 16:07:52.404309 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-volume-volume1-0" podUID="3fbeb883-d1df-4c65-8125-cdeb73794af3" containerName="cinder-volume" probeResult="failure" output="Get \"http://10.217.1.96:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:08:12 crc kubenswrapper[4879]: I1125 16:08:12.932695 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:08:12 crc kubenswrapper[4879]: I1125 16:08:12.933471 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-59f8cff499-s662j" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="octavia-amphora-httpd" containerID="cri-o://d337762861fd82c3e1576369c1b58967bfcec3e5ed4d7a41d9049f2a17b6945b" gracePeriod=30 Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.165767 4879 generic.go:334] "Generic (PLEG): container finished" 
podID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerID="d337762861fd82c3e1576369c1b58967bfcec3e5ed4d7a41d9049f2a17b6945b" exitCode=0 Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.165818 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerDied","Data":"d337762861fd82c3e1576369c1b58967bfcec3e5ed4d7a41d9049f2a17b6945b"} Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.438784 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.506520 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image\") pod \"05e66130-c6cf-4d1a-948b-bdde934893e8\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.506595 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config\") pod \"05e66130-c6cf-4d1a-948b-bdde934893e8\" (UID: \"05e66130-c6cf-4d1a-948b-bdde934893e8\") " Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.540383 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "05e66130-c6cf-4d1a-948b-bdde934893e8" (UID: "05e66130-c6cf-4d1a-948b-bdde934893e8"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.598939 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "05e66130-c6cf-4d1a-948b-bdde934893e8" (UID: "05e66130-c6cf-4d1a-948b-bdde934893e8"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.609406 4879 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/05e66130-c6cf-4d1a-948b-bdde934893e8-amphora-image\") on node \"crc\" DevicePath \"\"" Nov 25 16:08:13 crc kubenswrapper[4879]: I1125 16:08:13.609439 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/05e66130-c6cf-4d1a-948b-bdde934893e8-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.185028 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-s662j" Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.184963 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-s662j" event={"ID":"05e66130-c6cf-4d1a-948b-bdde934893e8","Type":"ContainerDied","Data":"ade5d184c5558254888be403c37cc9dbcb76a4dfed5eec1fa36f3be3529ad2e3"} Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.185150 4879 scope.go:117] "RemoveContainer" containerID="d337762861fd82c3e1576369c1b58967bfcec3e5ed4d7a41d9049f2a17b6945b" Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.224761 4879 scope.go:117] "RemoveContainer" containerID="654f6d1e9bcf7c02908454133a1ea2cb60f2b0b84e4cd454d00003c667884cef" Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.225415 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:08:14 crc kubenswrapper[4879]: I1125 16:08:14.234175 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-s662j"] Nov 25 16:08:15 crc kubenswrapper[4879]: I1125 16:08:15.655105 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" path="/var/lib/kubelet/pods/05e66130-c6cf-4d1a-948b-bdde934893e8/volumes" Nov 25 16:08:47 crc kubenswrapper[4879]: I1125 16:08:47.408326 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:08:47 crc kubenswrapper[4879]: I1125 16:08:47.408878 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.558762 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:08:58 crc kubenswrapper[4879]: E1125 16:08:58.559802 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" containerName="octavia-db-sync" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.559817 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" containerName="octavia-db-sync" Nov 25 16:08:58 crc kubenswrapper[4879]: E1125 16:08:58.559829 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="octavia-amphora-httpd" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.559836 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="octavia-amphora-httpd" Nov 25 16:08:58 crc kubenswrapper[4879]: E1125 16:08:58.559848 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" containerName="init" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.559858 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" containerName="init" Nov 25 16:08:58 crc kubenswrapper[4879]: E1125 16:08:58.559872 4879 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="init" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.559878 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="init" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.560109 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e66130-c6cf-4d1a-948b-bdde934893e8" containerName="octavia-amphora-httpd" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.560152 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" containerName="octavia-db-sync" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.561360 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.564494 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-dpnm7" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.564839 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.564964 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.565344 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.573344 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.608974 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.609308 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.609375 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.609617 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.609673 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j2cd\" (UniqueName: \"kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd\") pod 
\"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.624490 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.624773 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-log" containerID="cri-o://919c6dac1862c4e9c307b3e34b91ae3c16a3d0f3406d3f2a72b15f798112cbe7" gracePeriod=30 Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.625306 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-httpd" containerID="cri-o://ea4560eee4b0c9f634d283236e61f653c8ce93b64b01edc7f6c3fc9ee0715dc0" gracePeriod=30 Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.655448 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.659045 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.698212 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.711996 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j2cd\" (UniqueName: \"kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.712056 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.712213 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715207 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-598q8\" (UniqueName: \"kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715383 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715474 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715518 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715546 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.715929 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.716218 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.716489 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.716766 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.718298 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.720649 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key\") pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.738309 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j2cd\" (UniqueName: \"kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd\") 
pod \"horizon-758fbfdb69-lk7dg\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.756496 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.756869 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-httpd" containerID="cri-o://740c04b6aa3bfd660d7471ea86a023f80988c86dc3a4bb2ab4d30e22cdeae3bf" gracePeriod=30 Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.757053 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-log" containerID="cri-o://287dad8cc8bab35eda9435306e6a545520e62cbc9252b920d47fa2a903b4171e" gracePeriod=30 Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.818978 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.819042 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.819097 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-598q8\" (UniqueName: \"kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.819180 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.819221 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.819731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.820003 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts\") pod \"horizon-5cc697759c-tbpxn\" (UID: 
\"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.820841 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.824280 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.838477 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-598q8\" (UniqueName: \"kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8\") pod \"horizon-5cc697759c-tbpxn\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.885894 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:08:58 crc kubenswrapper[4879]: I1125 16:08:58.978899 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.256108 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.326890 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.331688 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.347365 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.419389 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.439810 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f9fg\" (UniqueName: \"kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.439883 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.439986 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.440012 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.440040 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: W1125 16:08:59.524176 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6277eb20_df4e_483d_9cc2_8be3dd2eec60.slice/crio-d924bbfcf202f15db9a57a49234192fe1392fc9a74c2b432e92b748ee6b42b62 WatchSource:0}: Error finding container d924bbfcf202f15db9a57a49234192fe1392fc9a74c2b432e92b748ee6b42b62: Status 404 returned error can't find the container with id d924bbfcf202f15db9a57a49234192fe1392fc9a74c2b432e92b748ee6b42b62 Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.525026 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542105 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f9fg\" (UniqueName: \"kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542221 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542351 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542393 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542426 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.542985 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.543488 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.543945 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.549314 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.561904 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f9fg\" (UniqueName: \"kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg\") pod \"horizon-78f78d4bdf-qsbw9\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.633610 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerStarted","Data":"d924bbfcf202f15db9a57a49234192fe1392fc9a74c2b432e92b748ee6b42b62"} Nov 
25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.635273 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerStarted","Data":"b1ca740b33aea01a57f2b2565a64da1dea63175d8d2ecda1adb1b943fb0ae619"} Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.638756 4879 generic.go:334] "Generic (PLEG): container finished" podID="badfca4f-63b6-4ef4-bb69-da455c095844" containerID="287dad8cc8bab35eda9435306e6a545520e62cbc9252b920d47fa2a903b4171e" exitCode=143 Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.638847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerDied","Data":"287dad8cc8bab35eda9435306e6a545520e62cbc9252b920d47fa2a903b4171e"} Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.642221 4879 generic.go:334] "Generic (PLEG): container finished" podID="d9afebe2-9a18-46a4-b728-184544602da1" containerID="919c6dac1862c4e9c307b3e34b91ae3c16a3d0f3406d3f2a72b15f798112cbe7" exitCode=143 Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.642257 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerDied","Data":"919c6dac1862c4e9c307b3e34b91ae3c16a3d0f3406d3f2a72b15f798112cbe7"} Nov 25 16:08:59 crc kubenswrapper[4879]: I1125 16:08:59.674221 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:09:00 crc kubenswrapper[4879]: I1125 16:09:00.129062 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:09:00 crc kubenswrapper[4879]: I1125 16:09:00.655336 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerStarted","Data":"ee2024b5ce471c904f143e921fc03a12b4c21249200fcb71a63be6c56975d822"} Nov 25 16:09:02 crc kubenswrapper[4879]: I1125 16:09:02.690584 4879 generic.go:334] "Generic (PLEG): container finished" podID="d9afebe2-9a18-46a4-b728-184544602da1" containerID="ea4560eee4b0c9f634d283236e61f653c8ce93b64b01edc7f6c3fc9ee0715dc0" exitCode=0 Nov 25 16:09:02 crc kubenswrapper[4879]: I1125 16:09:02.690847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerDied","Data":"ea4560eee4b0c9f634d283236e61f653c8ce93b64b01edc7f6c3fc9ee0715dc0"} Nov 25 16:09:02 crc kubenswrapper[4879]: I1125 16:09:02.694485 4879 generic.go:334] "Generic (PLEG): container finished" podID="badfca4f-63b6-4ef4-bb69-da455c095844" containerID="740c04b6aa3bfd660d7471ea86a023f80988c86dc3a4bb2ab4d30e22cdeae3bf" exitCode=0 Nov 25 16:09:02 crc kubenswrapper[4879]: I1125 16:09:02.694535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerDied","Data":"740c04b6aa3bfd660d7471ea86a023f80988c86dc3a4bb2ab4d30e22cdeae3bf"} Nov 25 16:09:05 crc kubenswrapper[4879]: I1125 16:09:05.123242 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-httpd" probeResult="failure" output="Get \"http://10.217.1.51:9292/healthcheck\": 
dial tcp 10.217.1.51:9292: connect: connection refused" Nov 25 16:09:05 crc kubenswrapper[4879]: I1125 16:09:05.123533 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-log" probeResult="failure" output="Get \"http://10.217.1.51:9292/healthcheck\": dial tcp 10.217.1.51:9292: connect: connection refused" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.686139 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.744861 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"badfca4f-63b6-4ef4-bb69-da455c095844","Type":"ContainerDied","Data":"00b96846b3f5b84152c8ab3218da623e81c3171016458e81e6571286bd3724c9"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.745171 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.745199 4879 scope.go:117] "RemoveContainer" containerID="740c04b6aa3bfd660d7471ea86a023f80988c86dc3a4bb2ab4d30e22cdeae3bf" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.785592 4879 scope.go:117] "RemoveContainer" containerID="287dad8cc8bab35eda9435306e6a545520e62cbc9252b920d47fa2a903b4171e" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818420 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818482 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh6rd\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818613 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818660 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818705 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818813 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 
16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.818879 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle\") pod \"badfca4f-63b6-4ef4-bb69-da455c095844\" (UID: \"badfca4f-63b6-4ef4-bb69-da455c095844\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.822394 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs" (OuterVolumeSpecName: "logs") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.822503 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.825359 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph" (OuterVolumeSpecName: "ceph") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.826070 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts" (OuterVolumeSpecName: "scripts") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.827788 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd" (OuterVolumeSpecName: "kube-api-access-nh6rd") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "kube-api-access-nh6rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.873771 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.903096 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data" (OuterVolumeSpecName: "config-data") pod "badfca4f-63b6-4ef4-bb69-da455c095844" (UID: "badfca4f-63b6-4ef4-bb69-da455c095844"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923335 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923370 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh6rd\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-kube-api-access-nh6rd\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923413 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/badfca4f-63b6-4ef4-bb69-da455c095844-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923426 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923437 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923447 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/badfca4f-63b6-4ef4-bb69-da455c095844-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:06.923483 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfca4f-63b6-4ef4-bb69-da455c095844-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.098744 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.144323 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.161078 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:09:07 crc kubenswrapper[4879]: E1125 16:09:07.162026 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-log" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.162043 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-log" Nov 25 16:09:07 crc kubenswrapper[4879]: E1125 16:09:07.162067 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-httpd" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.162075 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-httpd" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.162343 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-log" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.162377 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" containerName="glance-httpd" Nov 25 
16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.163625 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.167493 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.173690 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230014 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230099 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6dcb\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-kube-api-access-b6dcb\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230176 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230234 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230371 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230402 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.230449 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332447 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332510 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332540 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332557 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332645 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332682 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6dcb\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-kube-api-access-b6dcb\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332714 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.332999 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.333615 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6cc5c85-1149-4388-8606-859f5c8c2a14-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.336674 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.337222 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.337308 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.340574 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6cc5c85-1149-4388-8606-859f5c8c2a14-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.357191 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6dcb\" (UniqueName: \"kubernetes.io/projected/c6cc5c85-1149-4388-8606-859f5c8c2a14-kube-api-access-b6dcb\") pod \"glance-default-internal-api-0\" (UID: \"c6cc5c85-1149-4388-8606-859f5c8c2a14\") " pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.483593 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.636462 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.670815 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="badfca4f-63b6-4ef4-bb69-da455c095844" path="/var/lib/kubelet/pods/badfca4f-63b6-4ef4-bb69-da455c095844/volumes" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.741870 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742029 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742076 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742202 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw4jm\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742241 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742266 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.742402 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run\") pod \"d9afebe2-9a18-46a4-b728-184544602da1\" (UID: \"d9afebe2-9a18-46a4-b728-184544602da1\") " Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.743507 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.743667 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs" (OuterVolumeSpecName: "logs") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.750801 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm" (OuterVolumeSpecName: "kube-api-access-pw4jm") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "kube-api-access-pw4jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.752212 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts" (OuterVolumeSpecName: "scripts") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.755039 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph" (OuterVolumeSpecName: "ceph") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.759164 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerStarted","Data":"b0d23bb60475d8f3dca2ba9205760b527c3961549670b3cafc941b33fd090ac6"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.759204 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerStarted","Data":"79a65d26ff192841236f5aebed0585def513dec41f63a9767adf52e0d83e6221"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.760999 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.761247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d9afebe2-9a18-46a4-b728-184544602da1","Type":"ContainerDied","Data":"b3ee5ac96f9be918daa7e2891e30d0d0975a11793187423fa2187eac17dcc535"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.761281 4879 scope.go:117] "RemoveContainer" containerID="ea4560eee4b0c9f634d283236e61f653c8ce93b64b01edc7f6c3fc9ee0715dc0" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.765105 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerStarted","Data":"37cb2e770ffd93e7e38b5b507fc02050af18eb42a0706835a993871dcdd4afb9"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.765156 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerStarted","Data":"a59b40996084cd084271068887b3d923f8e3786f843643d03536270ba74856c6"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.770200 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerStarted","Data":"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.770250 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerStarted","Data":"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65"} Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.770411 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cc697759c-tbpxn" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon-log" containerID="cri-o://601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" gracePeriod=30 Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.770603 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cc697759c-tbpxn" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon" containerID="cri-o://09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" gracePeriod=30 Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.786754 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-78f78d4bdf-qsbw9" podStartSLOduration=2.514909122 podStartE2EDuration="8.786735282s" podCreationTimestamp="2025-11-25 16:08:59 +0000 UTC" firstStartedPulling="2025-11-25 16:09:00.133219138 +0000 UTC m=+6231.736632209" lastFinishedPulling="2025-11-25 16:09:06.405045298 +0000 UTC m=+6238.008458369" observedRunningTime="2025-11-25 16:09:07.780988066 +0000 UTC m=+6239.384401157" watchObservedRunningTime="2025-11-25 16:09:07.786735282 +0000 UTC m=+6239.390148353" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.812500 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5cc697759c-tbpxn" podStartSLOduration=2.961435854 podStartE2EDuration="9.812477311s" podCreationTimestamp="2025-11-25 16:08:58 +0000 UTC" firstStartedPulling="2025-11-25 16:08:59.526768511 +0000 UTC m=+6231.130181582" lastFinishedPulling="2025-11-25 
16:09:06.377809968 +0000 UTC m=+6237.981223039" observedRunningTime="2025-11-25 16:09:07.806390535 +0000 UTC m=+6239.409803606" watchObservedRunningTime="2025-11-25 16:09:07.812477311 +0000 UTC m=+6239.415890392" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.814107 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.815907 4879 scope.go:117] "RemoveContainer" containerID="919c6dac1862c4e9c307b3e34b91ae3c16a3d0f3406d3f2a72b15f798112cbe7" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.827531 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data" (OuterVolumeSpecName: "config-data") pod "d9afebe2-9a18-46a4-b728-184544602da1" (UID: "d9afebe2-9a18-46a4-b728-184544602da1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849822 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849863 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849881 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw4jm\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-kube-api-access-pw4jm\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849895 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849904 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d9afebe2-9a18-46a4-b728-184544602da1-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849913 4879 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d9afebe2-9a18-46a4-b728-184544602da1-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.849976 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9afebe2-9a18-46a4-b728-184544602da1-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:07 crc kubenswrapper[4879]: I1125 16:09:07.877410 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-758fbfdb69-lk7dg" podStartSLOduration=2.815735819 podStartE2EDuration="9.877388702s" podCreationTimestamp="2025-11-25 16:08:58 +0000 UTC" firstStartedPulling="2025-11-25 16:08:59.413253141 +0000 UTC m=+6231.016666212" lastFinishedPulling="2025-11-25 16:09:06.474906024 +0000 UTC m=+6238.078319095" 
observedRunningTime="2025-11-25 16:09:07.834595641 +0000 UTC m=+6239.438008712" watchObservedRunningTime="2025-11-25 16:09:07.877388702 +0000 UTC m=+6239.480801773" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.105021 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:09:08 crc kubenswrapper[4879]: W1125 16:09:08.118747 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6cc5c85_1149_4388_8606_859f5c8c2a14.slice/crio-e1812f999af39e27304a7eda18fb9c8f3cc82028d52e5e892427d5b2f5c7a413 WatchSource:0}: Error finding container e1812f999af39e27304a7eda18fb9c8f3cc82028d52e5e892427d5b2f5c7a413: Status 404 returned error can't find the container with id e1812f999af39e27304a7eda18fb9c8f3cc82028d52e5e892427d5b2f5c7a413 Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.131111 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.148331 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.162188 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:09:08 crc kubenswrapper[4879]: E1125 16:09:08.162755 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-httpd" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.162781 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-httpd" Nov 25 16:09:08 crc kubenswrapper[4879]: E1125 16:09:08.162821 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-log" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.162831 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-log" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.164236 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-httpd" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.164276 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9afebe2-9a18-46a4-b728-184544602da1" containerName="glance-log" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.165773 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.170296 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.171687 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.261931 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-logs\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262020 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-config-data\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262060 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-ceph\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-scripts\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262202 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.262229 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8ldw\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-kube-api-access-b8ldw\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.364838 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.364943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-ceph\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.364986 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-scripts\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.365044 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.365071 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8ldw\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-kube-api-access-b8ldw\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.365137 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-logs\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.365211 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-config-data\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.367386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.368653 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/202b8ca0-3167-46f7-98b7-643afff13caf-logs\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.371449 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-config-data\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 
16:09:08.373892 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-scripts\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.389081 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/202b8ca0-3167-46f7-98b7-643afff13caf-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.391049 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8ldw\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-kube-api-access-b8ldw\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.392843 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/202b8ca0-3167-46f7-98b7-643afff13caf-ceph\") pod \"glance-default-external-api-0\" (UID: \"202b8ca0-3167-46f7-98b7-643afff13caf\") " pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.564990 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.896431 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.896739 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.919706 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cc5c85-1149-4388-8606-859f5c8c2a14","Type":"ContainerStarted","Data":"e1812f999af39e27304a7eda18fb9c8f3cc82028d52e5e892427d5b2f5c7a413"} Nov 25 16:09:08 crc kubenswrapper[4879]: I1125 16:09:08.983084 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.354078 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.662354 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9afebe2-9a18-46a4-b728-184544602da1" path="/var/lib/kubelet/pods/d9afebe2-9a18-46a4-b728-184544602da1/volumes" Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.677109 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.677182 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.929676 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"202b8ca0-3167-46f7-98b7-643afff13caf","Type":"ContainerStarted","Data":"ceac79d266347236eb33bb4993894ba61a72254f2096e1d2fc758597ce06cf16"} Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.931819 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cc5c85-1149-4388-8606-859f5c8c2a14","Type":"ContainerStarted","Data":"8b252e97340ed432c7305590fa9e6d42c62484620a4612e449b92b993d0b26b1"} Nov 25 16:09:09 crc kubenswrapper[4879]: I1125 16:09:09.931864 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6cc5c85-1149-4388-8606-859f5c8c2a14","Type":"ContainerStarted","Data":"268d482c950e200a7defb1e15a234b7f3dd1df16a3bffb9ccede16cdce96cf81"} Nov 25 16:09:10 crc kubenswrapper[4879]: I1125 16:09:10.948804 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"202b8ca0-3167-46f7-98b7-643afff13caf","Type":"ContainerStarted","Data":"35a57dffd481b54529aa4d66261904a522263ef37ecc935a7ecb47d65f603414"} Nov 25 16:09:10 crc kubenswrapper[4879]: I1125 16:09:10.988571 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.98855117 podStartE2EDuration="3.98855117s" podCreationTimestamp="2025-11-25 16:09:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:09:10.969262986 +0000 UTC m=+6242.572676067" watchObservedRunningTime="2025-11-25 16:09:10.98855117 +0000 UTC m=+6242.591964241" Nov 25 16:09:11 crc kubenswrapper[4879]: I1125 16:09:11.962211 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"202b8ca0-3167-46f7-98b7-643afff13caf","Type":"ContainerStarted","Data":"47ab303d3b8c925c94d6a1443aa686d19ce0f2a7baebeed7f9ae3f2af6064c92"} Nov 25 16:09:11 crc kubenswrapper[4879]: I1125 16:09:11.985344 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.98532835 podStartE2EDuration="3.98532835s" podCreationTimestamp="2025-11-25 16:09:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:09:11.98388383 +0000 UTC m=+6243.587296901" watchObservedRunningTime="2025-11-25 16:09:11.98532835 +0000 UTC m=+6243.588741421" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.409843 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.410475 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.484764 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.484825 4879 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.520113 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.524653 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.804863 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:17 crc kubenswrapper[4879]: I1125 16:09:17.806214 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.566623 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.566707 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.601030 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.614322 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.815263 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.815601 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 25 16:09:18 crc kubenswrapper[4879]: I1125 16:09:18.889368 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-758fbfdb69-lk7dg" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused" Nov 25 16:09:19 crc kubenswrapper[4879]: I1125 16:09:19.677263 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.118:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.118:8080: connect: connection refused" Nov 25 16:09:19 crc kubenswrapper[4879]: I1125 16:09:19.823736 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 16:09:19 crc kubenswrapper[4879]: I1125 16:09:19.823770 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 16:09:20 crc kubenswrapper[4879]: I1125 16:09:20.117789 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:20 crc kubenswrapper[4879]: I1125 16:09:20.138355 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 25 16:09:20 crc kubenswrapper[4879]: I1125 16:09:20.842294 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 16:09:20 crc kubenswrapper[4879]: I1125 
16:09:20.843701 4879 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 16:09:20 crc kubenswrapper[4879]: I1125 16:09:20.893219 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 16:09:21 crc kubenswrapper[4879]: I1125 16:09:21.090537 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.057218 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-cl6z2"] Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.072742 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-3d30-account-create-vpdnx"] Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.086467 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-cl6z2"] Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.098186 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-3d30-account-create-vpdnx"] Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.659850 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="422d9538-42a1-42bb-8940-0e92f8f7e665" path="/var/lib/kubelet/pods/422d9538-42a1-42bb-8940-0e92f8f7e665/volumes" Nov 25 16:09:25 crc kubenswrapper[4879]: I1125 16:09:25.660987 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a08e55e8-641b-4464-82cd-9414c1b7dab0" path="/var/lib/kubelet/pods/a08e55e8-641b-4464-82cd-9414c1b7dab0/volumes" Nov 25 16:09:28 crc kubenswrapper[4879]: I1125 16:09:28.888103 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-758fbfdb69-lk7dg" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused" Nov 25 16:09:31 crc kubenswrapper[4879]: I1125 16:09:31.485910 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:09:32 crc kubenswrapper[4879]: I1125 16:09:32.033673 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-2hwt8"] Nov 25 16:09:32 crc kubenswrapper[4879]: I1125 16:09:32.041959 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-2hwt8"] Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.141541 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.201108 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.201811 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-758fbfdb69-lk7dg" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon-log" containerID="cri-o://a59b40996084cd084271068887b3d923f8e3786f843643d03536270ba74856c6" gracePeriod=30 Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.201979 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-758fbfdb69-lk7dg" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" containerID="cri-o://37cb2e770ffd93e7e38b5b507fc02050af18eb42a0706835a993871dcdd4afb9" gracePeriod=30 Nov 25 
16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.661562 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="072c3164-fa81-4e53-8f73-c0c303da55e6" path="/var/lib/kubelet/pods/072c3164-fa81-4e53-8f73-c0c303da55e6/volumes" Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.969985 4879 generic.go:334] "Generic (PLEG): container finished" podID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerID="37cb2e770ffd93e7e38b5b507fc02050af18eb42a0706835a993871dcdd4afb9" exitCode=0 Nov 25 16:09:33 crc kubenswrapper[4879]: I1125 16:09:33.970044 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerDied","Data":"37cb2e770ffd93e7e38b5b507fc02050af18eb42a0706835a993871dcdd4afb9"} Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.542563 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.545511 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.554814 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.703823 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.704360 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.704435 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp55m\" (UniqueName: \"kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.770301 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.806544 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.806615 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp55m\" (UniqueName: \"kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.806659 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.807191 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.807319 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.828530 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp55m\" (UniqueName: \"kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m\") pod \"certified-operators-tgf6n\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.880340 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.908471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-598q8\" (UniqueName: \"kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8\") pod \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.909608 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts\") pod \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.909638 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs\") pod \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.909697 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key\") pod \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.909842 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data\") pod \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\" (UID: \"6277eb20-df4e-483d-9cc2-8be3dd2eec60\") " Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.910065 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs" (OuterVolumeSpecName: "logs") pod "6277eb20-df4e-483d-9cc2-8be3dd2eec60" (UID: "6277eb20-df4e-483d-9cc2-8be3dd2eec60"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.910754 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6277eb20-df4e-483d-9cc2-8be3dd2eec60-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.918743 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8" (OuterVolumeSpecName: "kube-api-access-598q8") pod "6277eb20-df4e-483d-9cc2-8be3dd2eec60" (UID: "6277eb20-df4e-483d-9cc2-8be3dd2eec60"). InnerVolumeSpecName "kube-api-access-598q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.927232 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6277eb20-df4e-483d-9cc2-8be3dd2eec60" (UID: "6277eb20-df4e-483d-9cc2-8be3dd2eec60"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.944770 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data" (OuterVolumeSpecName: "config-data") pod "6277eb20-df4e-483d-9cc2-8be3dd2eec60" (UID: "6277eb20-df4e-483d-9cc2-8be3dd2eec60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:09:38 crc kubenswrapper[4879]: I1125 16:09:38.950161 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts" (OuterVolumeSpecName: "scripts") pod "6277eb20-df4e-483d-9cc2-8be3dd2eec60" (UID: "6277eb20-df4e-483d-9cc2-8be3dd2eec60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.012185 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-598q8\" (UniqueName: \"kubernetes.io/projected/6277eb20-df4e-483d-9cc2-8be3dd2eec60-kube-api-access-598q8\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.012478 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.012490 4879 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6277eb20-df4e-483d-9cc2-8be3dd2eec60-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.012503 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6277eb20-df4e-483d-9cc2-8be3dd2eec60-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023830 4879 generic.go:334] "Generic (PLEG): container finished" podID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerID="09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" exitCode=137 Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023861 4879 generic.go:334] "Generic (PLEG): container finished" podID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerID="601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" exitCode=137 Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023882 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerDied","Data":"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24"} Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023913 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerDied","Data":"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65"} Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023924 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc697759c-tbpxn" event={"ID":"6277eb20-df4e-483d-9cc2-8be3dd2eec60","Type":"ContainerDied","Data":"d924bbfcf202f15db9a57a49234192fe1392fc9a74c2b432e92b748ee6b42b62"} Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.023940 4879 scope.go:117] "RemoveContainer" 
containerID="09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.024074 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cc697759c-tbpxn" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.202982 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.212813 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5cc697759c-tbpxn"] Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.346301 4879 scope.go:117] "RemoveContainer" containerID="601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.409261 4879 scope.go:117] "RemoveContainer" containerID="09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" Nov 25 16:09:39 crc kubenswrapper[4879]: E1125 16:09:39.412041 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24\": container with ID starting with 09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24 not found: ID does not exist" containerID="09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.412091 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24"} err="failed to get container status \"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24\": rpc error: code = NotFound desc = could not find container \"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24\": container with ID starting with 09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24 not found: ID does not exist" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.412138 4879 scope.go:117] "RemoveContainer" containerID="601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" Nov 25 16:09:39 crc kubenswrapper[4879]: E1125 16:09:39.413047 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65\": container with ID starting with 601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65 not found: ID does not exist" containerID="601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.413072 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65"} err="failed to get container status \"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65\": rpc error: code = NotFound desc = could not find container \"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65\": container with ID starting with 601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65 not found: ID does not exist" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.413088 4879 scope.go:117] "RemoveContainer" containerID="09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.413338 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24"} err="failed to get container status \"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24\": rpc error: code = NotFound desc = could not find container \"09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24\": container with ID starting with 09ced6961f7583e1d89a828f6850672c8921dbe9e4b8343147200374019bde24 not found: ID does not exist" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.413359 4879 scope.go:117] "RemoveContainer" containerID="601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65" Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.413663 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65"} err="failed to get container status \"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65\": rpc error: code = NotFound desc = could not find container \"601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65\": container with ID starting with 601a3bce8f75feec453f71831d35d629254ac8f596616d127b0a0a41cbd7ff65 not found: ID does not exist" Nov 25 16:09:39 crc kubenswrapper[4879]: W1125 16:09:39.595976 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f91ae93_0738_4ef2_a19d_b2d97c2608ed.slice/crio-8e297b7e9f98e2602297b1656c9dd1b891b785ddd0fc8d1b2e6ed9a26fa020c5 WatchSource:0}: Error finding container 8e297b7e9f98e2602297b1656c9dd1b891b785ddd0fc8d1b2e6ed9a26fa020c5: Status 404 returned error can't find the container with id 8e297b7e9f98e2602297b1656c9dd1b891b785ddd0fc8d1b2e6ed9a26fa020c5 Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.596534 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:39 crc kubenswrapper[4879]: I1125 16:09:39.659204 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" path="/var/lib/kubelet/pods/6277eb20-df4e-483d-9cc2-8be3dd2eec60/volumes" Nov 25 16:09:40 crc kubenswrapper[4879]: I1125 16:09:40.035863 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerID="ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193" exitCode=0 Nov 25 16:09:40 crc kubenswrapper[4879]: I1125 16:09:40.036172 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerDied","Data":"ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193"} Nov 25 16:09:40 crc kubenswrapper[4879]: I1125 16:09:40.036247 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerStarted","Data":"8e297b7e9f98e2602297b1656c9dd1b891b785ddd0fc8d1b2e6ed9a26fa020c5"} Nov 25 16:09:40 crc kubenswrapper[4879]: I1125 16:09:40.037590 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:09:41 crc kubenswrapper[4879]: I1125 16:09:41.046977 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" 
event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerStarted","Data":"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe"} Nov 25 16:09:42 crc kubenswrapper[4879]: I1125 16:09:42.058745 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerID="170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe" exitCode=0 Nov 25 16:09:42 crc kubenswrapper[4879]: I1125 16:09:42.058795 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerDied","Data":"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe"} Nov 25 16:09:43 crc kubenswrapper[4879]: I1125 16:09:43.071829 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerStarted","Data":"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02"} Nov 25 16:09:43 crc kubenswrapper[4879]: I1125 16:09:43.096018 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tgf6n" podStartSLOduration=2.664146396 podStartE2EDuration="5.095993879s" podCreationTimestamp="2025-11-25 16:09:38 +0000 UTC" firstStartedPulling="2025-11-25 16:09:40.037402728 +0000 UTC m=+6271.640815799" lastFinishedPulling="2025-11-25 16:09:42.469250171 +0000 UTC m=+6274.072663282" observedRunningTime="2025-11-25 16:09:43.089748999 +0000 UTC m=+6274.693162080" watchObservedRunningTime="2025-11-25 16:09:43.095993879 +0000 UTC m=+6274.699406970" Nov 25 16:09:47 crc kubenswrapper[4879]: I1125 16:09:47.408910 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:09:47 crc kubenswrapper[4879]: I1125 16:09:47.409679 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:09:47 crc kubenswrapper[4879]: I1125 16:09:47.409746 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:09:47 crc kubenswrapper[4879]: I1125 16:09:47.410803 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:09:47 crc kubenswrapper[4879]: I1125 16:09:47.410905 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8" gracePeriod=600 Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.129865 4879 generic.go:334] 
"Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8" exitCode=0 Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.129915 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8"} Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.130222 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4"} Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.130248 4879 scope.go:117] "RemoveContainer" containerID="1815dd7ba358fd3a8d4d6114afb058001b2d842d19aac242d3e7e077ff14a85f" Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.880945 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:48 crc kubenswrapper[4879]: I1125 16:09:48.881227 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:49 crc kubenswrapper[4879]: I1125 16:09:49.037971 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:49 crc kubenswrapper[4879]: I1125 16:09:49.189643 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:49 crc kubenswrapper[4879]: I1125 16:09:49.279801 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.160666 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tgf6n" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="registry-server" containerID="cri-o://30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02" gracePeriod=2 Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.385246 4879 scope.go:117] "RemoveContainer" containerID="084c0162e8174faf7d9c9ca6ce874de1290fb4ce4ea2b385a1e6ffe32527189e" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.413378 4879 scope.go:117] "RemoveContainer" containerID="c8d00db643bb70b03b1122054d30fd3cf5e95f9800ef4c151258068b1a56f710" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.458940 4879 scope.go:117] "RemoveContainer" containerID="d7bb95e3356be281ebd2b6906bb33a9f5078552ea2c8f08bfb96df92a1884f35" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.487010 4879 scope.go:117] "RemoveContainer" containerID="666b9994e53fb06538c80a83678bf387cebfa43ef1b6ae158ff69ade37c21dcb" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.560287 4879 scope.go:117] "RemoveContainer" containerID="d5603e49571c4f4afbb7b7c251e432a3fd0d37b2fbbdd7ec9b25c9a3ef25bd0a" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.659680 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.671705 4879 scope.go:117] "RemoveContainer" containerID="412dcc7cbf510b3ae43bbac4a484c394bdc03e83273b5bbd5f095b3569b7cf4b" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.780565 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities\") pod \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.780708 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content\") pod \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.780746 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp55m\" (UniqueName: \"kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m\") pod \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\" (UID: \"8f91ae93-0738-4ef2-a19d-b2d97c2608ed\") " Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.781914 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities" (OuterVolumeSpecName: "utilities") pod "8f91ae93-0738-4ef2-a19d-b2d97c2608ed" (UID: "8f91ae93-0738-4ef2-a19d-b2d97c2608ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.787148 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m" (OuterVolumeSpecName: "kube-api-access-zp55m") pod "8f91ae93-0738-4ef2-a19d-b2d97c2608ed" (UID: "8f91ae93-0738-4ef2-a19d-b2d97c2608ed"). InnerVolumeSpecName "kube-api-access-zp55m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.830825 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f91ae93-0738-4ef2-a19d-b2d97c2608ed" (UID: "8f91ae93-0738-4ef2-a19d-b2d97c2608ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.883477 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.883506 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:51 crc kubenswrapper[4879]: I1125 16:09:51.883517 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp55m\" (UniqueName: \"kubernetes.io/projected/8f91ae93-0738-4ef2-a19d-b2d97c2608ed-kube-api-access-zp55m\") on node \"crc\" DevicePath \"\"" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.171166 4879 generic.go:334] "Generic (PLEG): container finished" podID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerID="30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02" exitCode=0 Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.171214 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerDied","Data":"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02"} Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.171236 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgf6n" event={"ID":"8f91ae93-0738-4ef2-a19d-b2d97c2608ed","Type":"ContainerDied","Data":"8e297b7e9f98e2602297b1656c9dd1b891b785ddd0fc8d1b2e6ed9a26fa020c5"} Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.171235 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tgf6n" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.171313 4879 scope.go:117] "RemoveContainer" containerID="30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.193026 4879 scope.go:117] "RemoveContainer" containerID="170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.203548 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.213520 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tgf6n"] Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.227736 4879 scope.go:117] "RemoveContainer" containerID="ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.244554 4879 scope.go:117] "RemoveContainer" containerID="30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02" Nov 25 16:09:52 crc kubenswrapper[4879]: E1125 16:09:52.244904 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02\": container with ID starting with 30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02 not found: ID does not exist" containerID="30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.244949 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02"} err="failed to get container status \"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02\": rpc error: code = NotFound desc = could not find container \"30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02\": container with ID starting with 30a0c3b359698785070a1455cbc8d2aff7828310656ee0ee8d129a72fcf8af02 not found: ID does not exist" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.244983 4879 scope.go:117] "RemoveContainer" containerID="170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe" Nov 25 16:09:52 crc kubenswrapper[4879]: E1125 16:09:52.245403 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe\": container with ID starting with 170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe not found: ID does not exist" containerID="170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.245473 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe"} err="failed to get container status \"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe\": rpc error: code = NotFound desc = could not find container \"170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe\": container with ID starting with 170d8f8f286df063827b6ea71c8b97235e24e7418f8fd0ee4e5f9e3873c842fe not found: ID does not exist" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.245506 4879 scope.go:117] "RemoveContainer" 
containerID="ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193" Nov 25 16:09:52 crc kubenswrapper[4879]: E1125 16:09:52.245736 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193\": container with ID starting with ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193 not found: ID does not exist" containerID="ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193" Nov 25 16:09:52 crc kubenswrapper[4879]: I1125 16:09:52.245762 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193"} err="failed to get container status \"ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193\": rpc error: code = NotFound desc = could not find container \"ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193\": container with ID starting with ed36ee5f04151c57324184ba55f1744d7285849e888ff45572f1ba3f27f7e193 not found: ID does not exist" Nov 25 16:09:53 crc kubenswrapper[4879]: I1125 16:09:53.665430 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" path="/var/lib/kubelet/pods/8f91ae93-0738-4ef2-a19d-b2d97c2608ed/volumes" Nov 25 16:10:00 crc kubenswrapper[4879]: I1125 16:10:00.040837 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-md922"] Nov 25 16:10:00 crc kubenswrapper[4879]: I1125 16:10:00.053150 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-53eb-account-create-2jd8c"] Nov 25 16:10:00 crc kubenswrapper[4879]: I1125 16:10:00.063197 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-md922"] Nov 25 16:10:00 crc kubenswrapper[4879]: I1125 16:10:00.073348 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-53eb-account-create-2jd8c"] Nov 25 16:10:01 crc kubenswrapper[4879]: I1125 16:10:01.660778 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc8d596-9a9f-4e6b-925a-14d04a259781" path="/var/lib/kubelet/pods/0fc8d596-9a9f-4e6b-925a-14d04a259781/volumes" Nov 25 16:10:01 crc kubenswrapper[4879]: I1125 16:10:01.663631 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a20f1c1-7078-41b2-a79f-bfad722b8f70" path="/var/lib/kubelet/pods/7a20f1c1-7078-41b2-a79f-bfad722b8f70/volumes" Nov 25 16:10:03 crc kubenswrapper[4879]: I1125 16:10:03.306849 4879 generic.go:334] "Generic (PLEG): container finished" podID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerID="a59b40996084cd084271068887b3d923f8e3786f843643d03536270ba74856c6" exitCode=137 Nov 25 16:10:03 crc kubenswrapper[4879]: I1125 16:10:03.306919 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerDied","Data":"a59b40996084cd084271068887b3d923f8e3786f843643d03536270ba74856c6"} Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.182956 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.322182 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-758fbfdb69-lk7dg" event={"ID":"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1","Type":"ContainerDied","Data":"b1ca740b33aea01a57f2b2565a64da1dea63175d8d2ecda1adb1b943fb0ae619"} Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.322342 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-758fbfdb69-lk7dg" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.322720 4879 scope.go:117] "RemoveContainer" containerID="37cb2e770ffd93e7e38b5b507fc02050af18eb42a0706835a993871dcdd4afb9" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.339885 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data\") pod \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.340002 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key\") pod \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.340039 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts\") pod \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.340212 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs\") pod \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.340378 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9j2cd\" (UniqueName: \"kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd\") pod \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\" (UID: \"f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1\") " Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.340872 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs" (OuterVolumeSpecName: "logs") pod "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" (UID: "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.342020 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.352301 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" (UID: "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.362948 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd" (OuterVolumeSpecName: "kube-api-access-9j2cd") pod "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" (UID: "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1"). InnerVolumeSpecName "kube-api-access-9j2cd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.371834 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data" (OuterVolumeSpecName: "config-data") pod "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" (UID: "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.374420 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts" (OuterVolumeSpecName: "scripts") pod "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" (UID: "f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.444293 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9j2cd\" (UniqueName: \"kubernetes.io/projected/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-kube-api-access-9j2cd\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.444334 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.444345 4879 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.444354 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.501766 4879 scope.go:117] "RemoveContainer" containerID="a59b40996084cd084271068887b3d923f8e3786f843643d03536270ba74856c6" Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.659493 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:10:04 crc kubenswrapper[4879]: I1125 16:10:04.668504 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-758fbfdb69-lk7dg"] Nov 25 16:10:05 crc kubenswrapper[4879]: I1125 16:10:05.660646 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" path="/var/lib/kubelet/pods/f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1/volumes" Nov 25 16:10:09 crc kubenswrapper[4879]: I1125 16:10:09.032450 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-2tc2v"] Nov 25 16:10:09 crc kubenswrapper[4879]: I1125 16:10:09.044639 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-2tc2v"] Nov 25 16:10:09 crc kubenswrapper[4879]: I1125 
16:10:09.659181 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64721e41-24b7-4492-825b-f25a899324f2" path="/var/lib/kubelet/pods/64721e41-24b7-4492-825b-f25a899324f2/volumes" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.703670 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dfc55cbfc-rlmwz"] Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704762 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704780 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704803 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704810 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704840 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704848 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704861 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704868 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704891 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="registry-server" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704899 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="registry-server" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704917 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="extract-content" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704925 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="extract-content" Nov 25 16:10:16 crc kubenswrapper[4879]: E1125 16:10:16.704939 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="extract-utilities" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.704949 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="extract-utilities" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.705203 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f91ae93-0738-4ef2-a19d-b2d97c2608ed" containerName="registry-server" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.705216 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 
16:10:16.705228 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon-log" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.705244 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0209cfc-f2c2-4161-bcf1-7a7addc1e1c1" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.705271 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6277eb20-df4e-483d-9cc2-8be3dd2eec60" containerName="horizon" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.706644 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.728517 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfc55cbfc-rlmwz"] Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.747568 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb8sj\" (UniqueName: \"kubernetes.io/projected/1368a223-0bf5-4797-b790-993da5053700-kube-api-access-lb8sj\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.747893 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-scripts\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.748083 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1368a223-0bf5-4797-b790-993da5053700-horizon-secret-key\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.748241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1368a223-0bf5-4797-b790-993da5053700-logs\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.748259 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-config-data\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.850091 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-scripts\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.850510 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1368a223-0bf5-4797-b790-993da5053700-horizon-secret-key\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: 
\"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.850590 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1368a223-0bf5-4797-b790-993da5053700-logs\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.850621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-config-data\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.850767 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb8sj\" (UniqueName: \"kubernetes.io/projected/1368a223-0bf5-4797-b790-993da5053700-kube-api-access-lb8sj\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.851076 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1368a223-0bf5-4797-b790-993da5053700-logs\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.851498 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-scripts\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.851902 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1368a223-0bf5-4797-b790-993da5053700-config-data\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.856917 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1368a223-0bf5-4797-b790-993da5053700-horizon-secret-key\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:16 crc kubenswrapper[4879]: I1125 16:10:16.869852 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb8sj\" (UniqueName: \"kubernetes.io/projected/1368a223-0bf5-4797-b790-993da5053700-kube-api-access-lb8sj\") pod \"horizon-dfc55cbfc-rlmwz\" (UID: \"1368a223-0bf5-4797-b790-993da5053700\") " pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.027838 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.506594 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dfc55cbfc-rlmwz"] Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.650337 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfc55cbfc-rlmwz" event={"ID":"1368a223-0bf5-4797-b790-993da5053700","Type":"ContainerStarted","Data":"b9ecb3b407c71961c2ccfbda3cb939c32a2384dea255830a888eafdd2d4fc0b9"} Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.949895 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-5w25m"] Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.951705 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5w25m" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.960393 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-a082-account-create-q8ptq"] Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.962705 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.969317 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.975620 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-5w25m"] Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.976971 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:17 crc kubenswrapper[4879]: I1125 16:10:17.977166 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbtwp\" (UniqueName: \"kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.007362 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a082-account-create-q8ptq"] Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.079750 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbtwp\" (UniqueName: \"kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.079846 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.079917 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5rfp\" (UniqueName: \"kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp\") pod 
\"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.079985 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts\") pod \"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.080613 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.102187 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbtwp\" (UniqueName: \"kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp\") pod \"heat-db-create-5w25m\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.181862 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5rfp\" (UniqueName: \"kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp\") pod \"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.181929 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts\") pod \"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.182676 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts\") pod \"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.200321 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5rfp\" (UniqueName: \"kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp\") pod \"heat-a082-account-create-q8ptq\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.328826 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5w25m" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.360410 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.674720 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfc55cbfc-rlmwz" event={"ID":"1368a223-0bf5-4797-b790-993da5053700","Type":"ContainerStarted","Data":"1219d4e5d566181d101fb9f9f25c88055b55ece0b1bd72a21dc67d6616abf7bd"} Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.675280 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dfc55cbfc-rlmwz" event={"ID":"1368a223-0bf5-4797-b790-993da5053700","Type":"ContainerStarted","Data":"6de2ab9df1b7390d54be310ffd88d50235986a12ee616b0624f30fd8ea8f3dc7"} Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.701756 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-dfc55cbfc-rlmwz" podStartSLOduration=2.7017343719999998 podStartE2EDuration="2.701734372s" podCreationTimestamp="2025-11-25 16:10:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:10:18.700082018 +0000 UTC m=+6310.303495109" watchObservedRunningTime="2025-11-25 16:10:18.701734372 +0000 UTC m=+6310.305147443" Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.846796 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a082-account-create-q8ptq"] Nov 25 16:10:18 crc kubenswrapper[4879]: W1125 16:10:18.852592 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d33f0b6_2083_4926_87bb_ea4c103c19d1.slice/crio-266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5 WatchSource:0}: Error finding container 266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5: Status 404 returned error can't find the container with id 266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5 Nov 25 16:10:18 crc kubenswrapper[4879]: W1125 16:10:18.854891 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a66844e_4198_4176_a3b5_1a0cff251dff.slice/crio-9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02 WatchSource:0}: Error finding container 9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02: Status 404 returned error can't find the container with id 9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02 Nov 25 16:10:18 crc kubenswrapper[4879]: I1125 16:10:18.858539 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-5w25m"] Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.686822 4879 generic.go:334] "Generic (PLEG): container finished" podID="4d33f0b6-2083-4926-87bb-ea4c103c19d1" containerID="232a8549eb72c624baff9fc81e2b0ac7d614977fe6e1113a5fd72696aea08b71" exitCode=0 Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.686917 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a082-account-create-q8ptq" event={"ID":"4d33f0b6-2083-4926-87bb-ea4c103c19d1","Type":"ContainerDied","Data":"232a8549eb72c624baff9fc81e2b0ac7d614977fe6e1113a5fd72696aea08b71"} Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.687458 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a082-account-create-q8ptq" event={"ID":"4d33f0b6-2083-4926-87bb-ea4c103c19d1","Type":"ContainerStarted","Data":"266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5"} 
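The entries above trace the full PLEG cycle for these short-lived pods: ContainerStarted when the sandbox and container come up, ContainerDied with an exit code when they finish, plus the startup-latency accounting and the cAdvisor 404 watch warnings for containers that are gone before the watch event is processed. Below is a minimal Go sketch for pulling those PLEG events out of a kubelet log like this one; it assumes the exact klog text format shown above, and the regular expression, file name, and output layout are illustrative, not any kubelet API.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// plegRe matches the `SyncLoop (PLEG): event for pod` entries in this log.
// Capture groups: pod namespace/name, event type (ContainerStarted,
// ContainerDied, ...), and the container or sandbox ID in the Data field.
// The pattern is tied to this exact klog text, not to a stable interface.
var plegRe = regexp.MustCompile(
	`SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event=\{"ID":"[^"]+","Type":"([^"]+)","Data":"([^"]+)"\}`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet log lines can be very long
	for sc.Scan() {
		if m := plegRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%-50s %-18s %s\n", m[1], m[2], m[3])
		}
	}
}
```

Fed this section on stdin (e.g. `go run plegscan.go < kubelet.log`, both names illustrative), it prints one row per event, such as `openstack/heat-db-create-5w25m  ContainerDied  137a3d0d...`, which makes the create-job lifecycles easy to eyeball.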
Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.688903 4879 generic.go:334] "Generic (PLEG): container finished" podID="2a66844e-4198-4176-a3b5-1a0cff251dff" containerID="137a3d0d5fd3865902f93b4c1e1baab545cdea3babed3ba115ac17c6202a2698" exitCode=0 Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.689863 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5w25m" event={"ID":"2a66844e-4198-4176-a3b5-1a0cff251dff","Type":"ContainerDied","Data":"137a3d0d5fd3865902f93b4c1e1baab545cdea3babed3ba115ac17c6202a2698"} Nov 25 16:10:19 crc kubenswrapper[4879]: I1125 16:10:19.689895 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5w25m" event={"ID":"2a66844e-4198-4176-a3b5-1a0cff251dff","Type":"ContainerStarted","Data":"9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02"} Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.124343 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5w25m" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.143470 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts\") pod \"2a66844e-4198-4176-a3b5-1a0cff251dff\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.143540 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbtwp\" (UniqueName: \"kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp\") pod \"2a66844e-4198-4176-a3b5-1a0cff251dff\" (UID: \"2a66844e-4198-4176-a3b5-1a0cff251dff\") " Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.143950 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a66844e-4198-4176-a3b5-1a0cff251dff" (UID: "2a66844e-4198-4176-a3b5-1a0cff251dff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.144631 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a66844e-4198-4176-a3b5-1a0cff251dff-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.145502 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.150316 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp" (OuterVolumeSpecName: "kube-api-access-nbtwp") pod "2a66844e-4198-4176-a3b5-1a0cff251dff" (UID: "2a66844e-4198-4176-a3b5-1a0cff251dff"). InnerVolumeSpecName "kube-api-access-nbtwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.246434 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts\") pod \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.246886 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4d33f0b6-2083-4926-87bb-ea4c103c19d1" (UID: "4d33f0b6-2083-4926-87bb-ea4c103c19d1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.247184 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5rfp\" (UniqueName: \"kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp\") pod \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\" (UID: \"4d33f0b6-2083-4926-87bb-ea4c103c19d1\") " Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.247841 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d33f0b6-2083-4926-87bb-ea4c103c19d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.247862 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbtwp\" (UniqueName: \"kubernetes.io/projected/2a66844e-4198-4176-a3b5-1a0cff251dff-kube-api-access-nbtwp\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.251164 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp" (OuterVolumeSpecName: "kube-api-access-j5rfp") pod "4d33f0b6-2083-4926-87bb-ea4c103c19d1" (UID: "4d33f0b6-2083-4926-87bb-ea4c103c19d1"). InnerVolumeSpecName "kube-api-access-j5rfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.350108 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5rfp\" (UniqueName: \"kubernetes.io/projected/4d33f0b6-2083-4926-87bb-ea4c103c19d1-kube-api-access-j5rfp\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.708056 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5w25m" event={"ID":"2a66844e-4198-4176-a3b5-1a0cff251dff","Type":"ContainerDied","Data":"9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02"} Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.708099 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e0a284010e887c0aa021094f1e8b4c100f687b731fa07ab21a5f387ab5fbc02" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.708167 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-5w25m" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.710822 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a082-account-create-q8ptq" event={"ID":"4d33f0b6-2083-4926-87bb-ea4c103c19d1","Type":"ContainerDied","Data":"266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5"} Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.710858 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="266c4b799e520ba0ea3d00ca47fab9e1c360bd3d3171f4f4c4e6a379194ffdb5" Nov 25 16:10:21 crc kubenswrapper[4879]: I1125 16:10:21.710925 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a082-account-create-q8ptq" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.179340 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-s724w"] Nov 25 16:10:23 crc kubenswrapper[4879]: E1125 16:10:23.179997 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a66844e-4198-4176-a3b5-1a0cff251dff" containerName="mariadb-database-create" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.180009 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a66844e-4198-4176-a3b5-1a0cff251dff" containerName="mariadb-database-create" Nov 25 16:10:23 crc kubenswrapper[4879]: E1125 16:10:23.180028 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d33f0b6-2083-4926-87bb-ea4c103c19d1" containerName="mariadb-account-create" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.180033 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d33f0b6-2083-4926-87bb-ea4c103c19d1" containerName="mariadb-account-create" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.182350 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d33f0b6-2083-4926-87bb-ea4c103c19d1" containerName="mariadb-account-create" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.182387 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a66844e-4198-4176-a3b5-1a0cff251dff" containerName="mariadb-database-create" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.183463 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.188766 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bqg97" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.189686 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.203409 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-s724w"] Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.290160 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.290430 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkwsk\" (UniqueName: \"kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.290551 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.392821 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.393381 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.393450 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkwsk\" (UniqueName: \"kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.399664 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.399804 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" 
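The reconciler entries above show the attach-and-mount half of a volume's lifecycle (operationExecutor.VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded), mirroring the UnmountVolume.TearDown and "Volume detached" half logged earlier when pods finish. Below is a minimal Go sketch that pairs the two halves by UniqueName and flags volumes mounted but never detached; it assumes the log keeps klog's escaped-quote formatting exactly as shown, and the patterns are illustrative only.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// A volume appears first in "MountVolume.SetUp succeeded" and later in
// "Volume detached", both carrying the same UniqueName (plugin + pod UID +
// volume name). These patterns match the escaped quotes (\") that klog
// emits in this log; they are illustrative, not a kubelet interface.
var (
	mountRe  = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"[^"]*\\" \(UniqueName: \\"([^"]+)\\"\)`)
	detachRe = regexp.MustCompile(`Volume detached for volume \\"[^"]*\\" \(UniqueName: \\"([^"]+)\\"\)`)
)

func main() {
	mounted := map[string]bool{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		line := sc.Text()
		if m := mountRe.FindStringSubmatch(line); m != nil {
			mounted[m[1]] = true // mount observed
		}
		if m := detachRe.FindStringSubmatch(line); m != nil {
			delete(mounted, m[1]) // detach closes the pair
		}
	}
	for name := range mounted {
		fmt.Println("mounted but never detached:", name)
	}
}
```

Over a bounded excerpt like this one the output is naturally noisy at the edges: the heat-db-create and heat-a082 volumes pair up cleanly (mounted above, detached a few seconds later), while volumes for pods still running when the excerpt ends, such as horizon-dfc55cbfc-rlmwz, are reported as unmatched.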
Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.413550 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkwsk\" (UniqueName: \"kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk\") pod \"heat-db-sync-s724w\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " pod="openstack/heat-db-sync-s724w" Nov 25 16:10:23 crc kubenswrapper[4879]: I1125 16:10:23.505983 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-s724w" Nov 25 16:10:24 crc kubenswrapper[4879]: I1125 16:10:24.031348 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-s724w"] Nov 25 16:10:24 crc kubenswrapper[4879]: I1125 16:10:24.754323 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s724w" event={"ID":"cc563b11-2bd4-4f94-8264-c4accf7969c6","Type":"ContainerStarted","Data":"43f86c74061aacf2091bc242ca620f34462f68e8289e2147d5c61ecab4357cea"} Nov 25 16:10:27 crc kubenswrapper[4879]: I1125 16:10:27.028937 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:27 crc kubenswrapper[4879]: I1125 16:10:27.029380 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:30 crc kubenswrapper[4879]: I1125 16:10:30.821663 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s724w" event={"ID":"cc563b11-2bd4-4f94-8264-c4accf7969c6","Type":"ContainerStarted","Data":"c72ba6ce7c97c2215dea3746dfad87b93ef9329ee84a89fa5e0e5f9c41c73e49"} Nov 25 16:10:30 crc kubenswrapper[4879]: I1125 16:10:30.841268 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-s724w" podStartSLOduration=1.890177059 podStartE2EDuration="7.841250253s" podCreationTimestamp="2025-11-25 16:10:23 +0000 UTC" firstStartedPulling="2025-11-25 16:10:24.072756186 +0000 UTC m=+6315.676169247" lastFinishedPulling="2025-11-25 16:10:30.02382937 +0000 UTC m=+6321.627242441" observedRunningTime="2025-11-25 16:10:30.839782723 +0000 UTC m=+6322.443195794" watchObservedRunningTime="2025-11-25 16:10:30.841250253 +0000 UTC m=+6322.444663324" Nov 25 16:10:32 crc kubenswrapper[4879]: I1125 16:10:32.841389 4879 generic.go:334] "Generic (PLEG): container finished" podID="cc563b11-2bd4-4f94-8264-c4accf7969c6" containerID="c72ba6ce7c97c2215dea3746dfad87b93ef9329ee84a89fa5e0e5f9c41c73e49" exitCode=0 Nov 25 16:10:32 crc kubenswrapper[4879]: I1125 16:10:32.841781 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s724w" event={"ID":"cc563b11-2bd4-4f94-8264-c4accf7969c6","Type":"ContainerDied","Data":"c72ba6ce7c97c2215dea3746dfad87b93ef9329ee84a89fa5e0e5f9c41c73e49"} Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.219870 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-s724w" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.238694 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data\") pod \"cc563b11-2bd4-4f94-8264-c4accf7969c6\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.238766 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkwsk\" (UniqueName: \"kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk\") pod \"cc563b11-2bd4-4f94-8264-c4accf7969c6\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.238990 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle\") pod \"cc563b11-2bd4-4f94-8264-c4accf7969c6\" (UID: \"cc563b11-2bd4-4f94-8264-c4accf7969c6\") " Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.245518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk" (OuterVolumeSpecName: "kube-api-access-tkwsk") pod "cc563b11-2bd4-4f94-8264-c4accf7969c6" (UID: "cc563b11-2bd4-4f94-8264-c4accf7969c6"). InnerVolumeSpecName "kube-api-access-tkwsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.281025 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc563b11-2bd4-4f94-8264-c4accf7969c6" (UID: "cc563b11-2bd4-4f94-8264-c4accf7969c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.323897 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data" (OuterVolumeSpecName: "config-data") pod "cc563b11-2bd4-4f94-8264-c4accf7969c6" (UID: "cc563b11-2bd4-4f94-8264-c4accf7969c6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.341525 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.341755 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkwsk\" (UniqueName: \"kubernetes.io/projected/cc563b11-2bd4-4f94-8264-c4accf7969c6-kube-api-access-tkwsk\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.341767 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc563b11-2bd4-4f94-8264-c4accf7969c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.865631 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-s724w" event={"ID":"cc563b11-2bd4-4f94-8264-c4accf7969c6","Type":"ContainerDied","Data":"43f86c74061aacf2091bc242ca620f34462f68e8289e2147d5c61ecab4357cea"} Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.865670 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-s724w" Nov 25 16:10:34 crc kubenswrapper[4879]: I1125 16:10:34.865674 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43f86c74061aacf2091bc242ca620f34462f68e8289e2147d5c61ecab4357cea" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.805150 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-fcb9f8794-rb2q9"] Nov 25 16:10:35 crc kubenswrapper[4879]: E1125 16:10:35.806077 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc563b11-2bd4-4f94-8264-c4accf7969c6" containerName="heat-db-sync" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.806094 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc563b11-2bd4-4f94-8264-c4accf7969c6" containerName="heat-db-sync" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.806329 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc563b11-2bd4-4f94-8264-c4accf7969c6" containerName="heat-db-sync" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.807518 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.813866 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bqg97" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.821742 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.822765 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.877906 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-fcb9f8794-rb2q9"] Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.965800 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6c4b655ff8-7sj9f"] Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.978732 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.981574 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987111 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data-custom\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987153 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6c4b655ff8-7sj9f"] Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987157 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data-custom\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987226 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t659j\" (UniqueName: \"kubernetes.io/projected/574d8787-8b21-448d-b228-fbd1b54f30be-kube-api-access-t659j\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987257 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-combined-ca-bundle\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987394 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmwzm\" (UniqueName: \"kubernetes.io/projected/a2adfe45-39e8-43bd-8987-5109e1f80200-kube-api-access-qmwzm\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987495 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-combined-ca-bundle\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:35 crc kubenswrapper[4879]: I1125 16:10:35.987526 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.006137 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-67959fbf64-b2h9v"] Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.008714 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.012400 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.022456 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-67959fbf64-b2h9v"] Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.089416 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data-custom\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.089670 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data-custom\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.089703 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t659j\" (UniqueName: \"kubernetes.io/projected/574d8787-8b21-448d-b228-fbd1b54f30be-kube-api-access-t659j\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.090533 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-combined-ca-bundle\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.090707 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.090738 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmwzm\" (UniqueName: \"kubernetes.io/projected/a2adfe45-39e8-43bd-8987-5109e1f80200-kube-api-access-qmwzm\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.090799 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-combined-ca-bundle\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: 
\"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.090820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.098262 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data-custom\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.103032 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-combined-ca-bundle\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.113984 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-combined-ca-bundle\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.114220 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.114175 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2adfe45-39e8-43bd-8987-5109e1f80200-config-data\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.114272 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/574d8787-8b21-448d-b228-fbd1b54f30be-config-data-custom\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.117827 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmwzm\" (UniqueName: \"kubernetes.io/projected/a2adfe45-39e8-43bd-8987-5109e1f80200-kube-api-access-qmwzm\") pod \"heat-engine-fcb9f8794-rb2q9\" (UID: \"a2adfe45-39e8-43bd-8987-5109e1f80200\") " pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.123014 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t659j\" (UniqueName: \"kubernetes.io/projected/574d8787-8b21-448d-b228-fbd1b54f30be-kube-api-access-t659j\") pod \"heat-cfnapi-6c4b655ff8-7sj9f\" (UID: \"574d8787-8b21-448d-b228-fbd1b54f30be\") " pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 
crc kubenswrapper[4879]: I1125 16:10:36.153694 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.192266 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-665jk\" (UniqueName: \"kubernetes.io/projected/b137cdf4-7328-4e8b-b8b7-015e8094122c-kube-api-access-665jk\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.192315 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data-custom\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.192357 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.192508 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-combined-ca-bundle\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.296733 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-combined-ca-bundle\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.297182 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-665jk\" (UniqueName: \"kubernetes.io/projected/b137cdf4-7328-4e8b-b8b7-015e8094122c-kube-api-access-665jk\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.297228 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data-custom\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.297313 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.304331 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-combined-ca-bundle\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.305028 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data-custom\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.310256 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b137cdf4-7328-4e8b-b8b7-015e8094122c-config-data\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.310475 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.317974 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-665jk\" (UniqueName: \"kubernetes.io/projected/b137cdf4-7328-4e8b-b8b7-015e8094122c-kube-api-access-665jk\") pod \"heat-api-67959fbf64-b2h9v\" (UID: \"b137cdf4-7328-4e8b-b8b7-015e8094122c\") " pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.351689 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.661011 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-fcb9f8794-rb2q9"] Nov 25 16:10:36 crc kubenswrapper[4879]: W1125 16:10:36.666314 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2adfe45_39e8_43bd_8987_5109e1f80200.slice/crio-85ad65c62f0707275aaed0fb4c60fb55c2e31a80f9421e2dbb6e702130533c70 WatchSource:0}: Error finding container 85ad65c62f0707275aaed0fb4c60fb55c2e31a80f9421e2dbb6e702130533c70: Status 404 returned error can't find the container with id 85ad65c62f0707275aaed0fb4c60fb55c2e31a80f9421e2dbb6e702130533c70 Nov 25 16:10:36 crc kubenswrapper[4879]: W1125 16:10:36.876395 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod574d8787_8b21_448d_b228_fbd1b54f30be.slice/crio-af3bf41405b9e4c57d788501c589dbc963c44f58cd3a6b89311c15ae00f04627 WatchSource:0}: Error finding container af3bf41405b9e4c57d788501c589dbc963c44f58cd3a6b89311c15ae00f04627: Status 404 returned error can't find the container with id af3bf41405b9e4c57d788501c589dbc963c44f58cd3a6b89311c15ae00f04627 Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.876750 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6c4b655ff8-7sj9f"] Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.887231 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-67959fbf64-b2h9v"] Nov 25 16:10:36 crc kubenswrapper[4879]: W1125 16:10:36.887345 4879 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb137cdf4_7328_4e8b_b8b7_015e8094122c.slice/crio-605355e4d08f3096a4b726818f5f7b3666c45a07d8f508a80aef21f7b82223c9 WatchSource:0}: Error finding container 605355e4d08f3096a4b726818f5f7b3666c45a07d8f508a80aef21f7b82223c9: Status 404 returned error can't find the container with id 605355e4d08f3096a4b726818f5f7b3666c45a07d8f508a80aef21f7b82223c9 Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.922631 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" event={"ID":"574d8787-8b21-448d-b228-fbd1b54f30be","Type":"ContainerStarted","Data":"af3bf41405b9e4c57d788501c589dbc963c44f58cd3a6b89311c15ae00f04627"} Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.923904 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-67959fbf64-b2h9v" event={"ID":"b137cdf4-7328-4e8b-b8b7-015e8094122c","Type":"ContainerStarted","Data":"605355e4d08f3096a4b726818f5f7b3666c45a07d8f508a80aef21f7b82223c9"} Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.925791 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-fcb9f8794-rb2q9" event={"ID":"a2adfe45-39e8-43bd-8987-5109e1f80200","Type":"ContainerStarted","Data":"a19e83059e910b5ed4295be4220ce793ff318b4bcc1ca0d5ac23bcbd79f22e1f"} Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.925837 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-fcb9f8794-rb2q9" event={"ID":"a2adfe45-39e8-43bd-8987-5109e1f80200","Type":"ContainerStarted","Data":"85ad65c62f0707275aaed0fb4c60fb55c2e31a80f9421e2dbb6e702130533c70"} Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.926003 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:36 crc kubenswrapper[4879]: I1125 16:10:36.949696 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-fcb9f8794-rb2q9" podStartSLOduration=1.949674648 podStartE2EDuration="1.949674648s" podCreationTimestamp="2025-11-25 16:10:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:10:36.943026797 +0000 UTC m=+6328.546439868" watchObservedRunningTime="2025-11-25 16:10:36.949674648 +0000 UTC m=+6328.553087719" Nov 25 16:10:37 crc kubenswrapper[4879]: I1125 16:10:37.031021 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dfc55cbfc-rlmwz" podUID="1368a223-0bf5-4797-b790-993da5053700" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.122:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.122:8080: connect: connection refused" Nov 25 16:10:39 crc kubenswrapper[4879]: I1125 16:10:39.957258 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" event={"ID":"574d8787-8b21-448d-b228-fbd1b54f30be","Type":"ContainerStarted","Data":"6a7f5a14bcce98c7d4a4bcf4f54861b4c8f927f0d672434eeac6543b52dbfa21"} Nov 25 16:10:39 crc kubenswrapper[4879]: I1125 16:10:39.958103 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:39 crc kubenswrapper[4879]: I1125 16:10:39.959515 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-67959fbf64-b2h9v" 
event={"ID":"b137cdf4-7328-4e8b-b8b7-015e8094122c","Type":"ContainerStarted","Data":"bdacbe3a6013ff921b413a9a10409c7c69f6d3743c0ec71f6e767a41205acfb7"} Nov 25 16:10:39 crc kubenswrapper[4879]: I1125 16:10:39.959664 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:39 crc kubenswrapper[4879]: I1125 16:10:39.978584 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" podStartSLOduration=2.9241464219999997 podStartE2EDuration="4.978565563s" podCreationTimestamp="2025-11-25 16:10:35 +0000 UTC" firstStartedPulling="2025-11-25 16:10:36.8797601 +0000 UTC m=+6328.483173171" lastFinishedPulling="2025-11-25 16:10:38.934179241 +0000 UTC m=+6330.537592312" observedRunningTime="2025-11-25 16:10:39.974474421 +0000 UTC m=+6331.577887492" watchObservedRunningTime="2025-11-25 16:10:39.978565563 +0000 UTC m=+6331.581978634" Nov 25 16:10:40 crc kubenswrapper[4879]: I1125 16:10:40.001916 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-67959fbf64-b2h9v" podStartSLOduration=2.9543486420000002 podStartE2EDuration="5.001886946s" podCreationTimestamp="2025-11-25 16:10:35 +0000 UTC" firstStartedPulling="2025-11-25 16:10:36.889450293 +0000 UTC m=+6328.492863364" lastFinishedPulling="2025-11-25 16:10:38.936988597 +0000 UTC m=+6330.540401668" observedRunningTime="2025-11-25 16:10:39.993022395 +0000 UTC m=+6331.596435466" watchObservedRunningTime="2025-11-25 16:10:40.001886946 +0000 UTC m=+6331.605300017" Nov 25 16:10:46 crc kubenswrapper[4879]: I1125 16:10:46.182057 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-fcb9f8794-rb2q9" Nov 25 16:10:47 crc kubenswrapper[4879]: I1125 16:10:47.692419 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6c4b655ff8-7sj9f" Nov 25 16:10:47 crc kubenswrapper[4879]: I1125 16:10:47.699165 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-67959fbf64-b2h9v" Nov 25 16:10:49 crc kubenswrapper[4879]: I1125 16:10:49.138581 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:50 crc kubenswrapper[4879]: I1125 16:10:50.903388 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-dfc55cbfc-rlmwz" Nov 25 16:10:50 crc kubenswrapper[4879]: I1125 16:10:50.975430 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:10:50 crc kubenswrapper[4879]: I1125 16:10:50.975697 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon-log" containerID="cri-o://79a65d26ff192841236f5aebed0585def513dec41f63a9767adf52e0d83e6221" gracePeriod=30 Nov 25 16:10:50 crc kubenswrapper[4879]: I1125 16:10:50.976321 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" containerID="cri-o://b0d23bb60475d8f3dca2ba9205760b527c3961549670b3cafc941b33fd090ac6" gracePeriod=30 Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.078271 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-tnxpp"] Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 
16:10:51.091712 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-e0b3-account-create-6xh9f"] Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.108373 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-e0b3-account-create-6xh9f"] Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.124391 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-tnxpp"] Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.659383 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a565eaa-22b7-4c01-a928-1f47d507d05c" path="/var/lib/kubelet/pods/1a565eaa-22b7-4c01-a928-1f47d507d05c/volumes" Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.662384 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a66d36f1-0990-49b9-acbb-70adf1b0567d" path="/var/lib/kubelet/pods/a66d36f1-0990-49b9-acbb-70adf1b0567d/volumes" Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.845725 4879 scope.go:117] "RemoveContainer" containerID="3e340d3ed4e470268fc941715696f0a7cdd50f54533ddd573c8b694c61e86dae" Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.879018 4879 scope.go:117] "RemoveContainer" containerID="640b4a564a454df72f410b090d73a51aee71874ab7b9e5edbab39feebe993720" Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.928576 4879 scope.go:117] "RemoveContainer" containerID="135a0c5d0f70e788a68d3b450162be7a3dfa02f4c0f2ff410c6332e24a77ea3c" Nov 25 16:10:51 crc kubenswrapper[4879]: I1125 16:10:51.969614 4879 scope.go:117] "RemoveContainer" containerID="8fd5412b15d873c8b1171efaff172adf66703c189a7b1e222d8885297e240f99" Nov 25 16:10:52 crc kubenswrapper[4879]: I1125 16:10:52.020227 4879 scope.go:117] "RemoveContainer" containerID="584abaf843c718518d542e052391d201884def52b90c22054ec4e7f226db44b3" Nov 25 16:10:52 crc kubenswrapper[4879]: I1125 16:10:52.064346 4879 scope.go:117] "RemoveContainer" containerID="eddbbb6b3634367b4b3d07df5c64316f91335d028126ea0057936d07c228b88b" Nov 25 16:10:52 crc kubenswrapper[4879]: I1125 16:10:52.098806 4879 scope.go:117] "RemoveContainer" containerID="9d7af6e22b3298dbd6185ae66e5eec9b6f504a46a96dd60d6986d7bd10fbbcaa" Nov 25 16:10:55 crc kubenswrapper[4879]: I1125 16:10:55.139532 4879 generic.go:334] "Generic (PLEG): container finished" podID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerID="b0d23bb60475d8f3dca2ba9205760b527c3961549670b3cafc941b33fd090ac6" exitCode=0 Nov 25 16:10:55 crc kubenswrapper[4879]: I1125 16:10:55.139603 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerDied","Data":"b0d23bb60475d8f3dca2ba9205760b527c3961549670b3cafc941b33fd090ac6"} Nov 25 16:10:59 crc kubenswrapper[4879]: I1125 16:10:59.675618 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.118:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.118:8080: connect: connection refused" Nov 25 16:11:00 crc kubenswrapper[4879]: I1125 16:11:00.039274 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-7dhtd"] Nov 25 16:11:00 crc kubenswrapper[4879]: I1125 16:11:00.050550 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-7dhtd"] Nov 25 16:11:01 crc kubenswrapper[4879]: I1125 16:11:01.665660 4879 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a44596ad-b5bf-4132-b099-ed1ac235324f" path="/var/lib/kubelet/pods/a44596ad-b5bf-4132-b099-ed1ac235324f/volumes" Nov 25 16:11:04 crc kubenswrapper[4879]: I1125 16:11:04.937760 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9"] Nov 25 16:11:04 crc kubenswrapper[4879]: I1125 16:11:04.940536 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:04 crc kubenswrapper[4879]: I1125 16:11:04.942522 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 25 16:11:04 crc kubenswrapper[4879]: I1125 16:11:04.948464 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9"] Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.118655 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.119068 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.119263 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b7dh\" (UniqueName: \"kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.222308 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.222409 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.222598 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b7dh\" (UniqueName: 
\"kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.222884 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.223005 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.243510 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b7dh\" (UniqueName: \"kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.262098 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:05 crc kubenswrapper[4879]: I1125 16:11:05.701902 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9"] Nov 25 16:11:06 crc kubenswrapper[4879]: I1125 16:11:06.268420 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerStarted","Data":"edd9d6f1365a4342458861a63560ce907708f74a002bf1af5cff37eef2647efc"} Nov 25 16:11:06 crc kubenswrapper[4879]: I1125 16:11:06.268788 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerStarted","Data":"de64960ee8a680e4fd4c2a247521d543a67036c877e85bdb64017d62d4d1c2bf"} Nov 25 16:11:09 crc kubenswrapper[4879]: I1125 16:11:09.299437 4879 generic.go:334] "Generic (PLEG): container finished" podID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerID="edd9d6f1365a4342458861a63560ce907708f74a002bf1af5cff37eef2647efc" exitCode=0 Nov 25 16:11:09 crc kubenswrapper[4879]: I1125 16:11:09.299563 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerDied","Data":"edd9d6f1365a4342458861a63560ce907708f74a002bf1af5cff37eef2647efc"} Nov 25 16:11:09 crc kubenswrapper[4879]: I1125 16:11:09.675965 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.118:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.118:8080: connect: connection refused" Nov 25 16:11:14 crc kubenswrapper[4879]: I1125 16:11:14.346188 4879 generic.go:334] "Generic (PLEG): container finished" podID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerID="bdf2b606ed1a2f7740f74d8f533a95f19a6f01288bd173ac46e0166bda94a82a" exitCode=0 Nov 25 16:11:14 crc kubenswrapper[4879]: I1125 16:11:14.346271 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerDied","Data":"bdf2b606ed1a2f7740f74d8f533a95f19a6f01288bd173ac46e0166bda94a82a"} Nov 25 16:11:15 crc kubenswrapper[4879]: I1125 16:11:15.358342 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerStarted","Data":"c61e070ddd4bb1f0e4ae9414748184886077c4b40f9ea1183a27f055d92fc30d"} Nov 25 16:11:15 crc kubenswrapper[4879]: I1125 16:11:15.386262 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" podStartSLOduration=7.263829021 podStartE2EDuration="11.386244602s" podCreationTimestamp="2025-11-25 16:11:04 +0000 UTC" firstStartedPulling="2025-11-25 16:11:09.302084285 +0000 UTC m=+6360.905497356" lastFinishedPulling="2025-11-25 16:11:13.424499866 +0000 UTC m=+6365.027912937" 
observedRunningTime="2025-11-25 16:11:15.374264337 +0000 UTC m=+6366.977677398" watchObservedRunningTime="2025-11-25 16:11:15.386244602 +0000 UTC m=+6366.989657673" Nov 25 16:11:16 crc kubenswrapper[4879]: I1125 16:11:16.369712 4879 generic.go:334] "Generic (PLEG): container finished" podID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerID="c61e070ddd4bb1f0e4ae9414748184886077c4b40f9ea1183a27f055d92fc30d" exitCode=0 Nov 25 16:11:16 crc kubenswrapper[4879]: I1125 16:11:16.369815 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerDied","Data":"c61e070ddd4bb1f0e4ae9414748184886077c4b40f9ea1183a27f055d92fc30d"} Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.758773 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.874027 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4b7dh\" (UniqueName: \"kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh\") pod \"320cbc1f-7c52-4487-80f2-3397d3abdb86\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.874586 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util\") pod \"320cbc1f-7c52-4487-80f2-3397d3abdb86\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.874660 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle\") pod \"320cbc1f-7c52-4487-80f2-3397d3abdb86\" (UID: \"320cbc1f-7c52-4487-80f2-3397d3abdb86\") " Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.876593 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle" (OuterVolumeSpecName: "bundle") pod "320cbc1f-7c52-4487-80f2-3397d3abdb86" (UID: "320cbc1f-7c52-4487-80f2-3397d3abdb86"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.880366 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh" (OuterVolumeSpecName: "kube-api-access-4b7dh") pod "320cbc1f-7c52-4487-80f2-3397d3abdb86" (UID: "320cbc1f-7c52-4487-80f2-3397d3abdb86"). InnerVolumeSpecName "kube-api-access-4b7dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.885972 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util" (OuterVolumeSpecName: "util") pod "320cbc1f-7c52-4487-80f2-3397d3abdb86" (UID: "320cbc1f-7c52-4487-80f2-3397d3abdb86"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.977500 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4b7dh\" (UniqueName: \"kubernetes.io/projected/320cbc1f-7c52-4487-80f2-3397d3abdb86-kube-api-access-4b7dh\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.977550 4879 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-util\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:17 crc kubenswrapper[4879]: I1125 16:11:17.977564 4879 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/320cbc1f-7c52-4487-80f2-3397d3abdb86-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:18 crc kubenswrapper[4879]: I1125 16:11:18.389678 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" event={"ID":"320cbc1f-7c52-4487-80f2-3397d3abdb86","Type":"ContainerDied","Data":"de64960ee8a680e4fd4c2a247521d543a67036c877e85bdb64017d62d4d1c2bf"} Nov 25 16:11:18 crc kubenswrapper[4879]: I1125 16:11:18.389720 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de64960ee8a680e4fd4c2a247521d543a67036c877e85bdb64017d62d4d1c2bf" Nov 25 16:11:18 crc kubenswrapper[4879]: I1125 16:11:18.389791 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9" Nov 25 16:11:19 crc kubenswrapper[4879]: I1125 16:11:19.675668 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-78f78d4bdf-qsbw9" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.118:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.118:8080: connect: connection refused" Nov 25 16:11:19 crc kubenswrapper[4879]: I1125 16:11:19.675788 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:11:21 crc kubenswrapper[4879]: I1125 16:11:21.417346 4879 generic.go:334] "Generic (PLEG): container finished" podID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerID="79a65d26ff192841236f5aebed0585def513dec41f63a9767adf52e0d83e6221" exitCode=137 Nov 25 16:11:21 crc kubenswrapper[4879]: I1125 16:11:21.417410 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerDied","Data":"79a65d26ff192841236f5aebed0585def513dec41f63a9767adf52e0d83e6221"} Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.023564 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.065145 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data\") pod \"130a3c03-aef1-48df-be5c-d36902fe21b6\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.065180 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f9fg\" (UniqueName: \"kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg\") pod \"130a3c03-aef1-48df-be5c-d36902fe21b6\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.065289 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts\") pod \"130a3c03-aef1-48df-be5c-d36902fe21b6\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.065323 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key\") pod \"130a3c03-aef1-48df-be5c-d36902fe21b6\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.065420 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs\") pod \"130a3c03-aef1-48df-be5c-d36902fe21b6\" (UID: \"130a3c03-aef1-48df-be5c-d36902fe21b6\") " Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.069764 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs" (OuterVolumeSpecName: "logs") pod "130a3c03-aef1-48df-be5c-d36902fe21b6" (UID: "130a3c03-aef1-48df-be5c-d36902fe21b6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.074491 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "130a3c03-aef1-48df-be5c-d36902fe21b6" (UID: "130a3c03-aef1-48df-be5c-d36902fe21b6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.077325 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg" (OuterVolumeSpecName: "kube-api-access-7f9fg") pod "130a3c03-aef1-48df-be5c-d36902fe21b6" (UID: "130a3c03-aef1-48df-be5c-d36902fe21b6"). InnerVolumeSpecName "kube-api-access-7f9fg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.106071 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts" (OuterVolumeSpecName: "scripts") pod "130a3c03-aef1-48df-be5c-d36902fe21b6" (UID: "130a3c03-aef1-48df-be5c-d36902fe21b6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.135163 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data" (OuterVolumeSpecName: "config-data") pod "130a3c03-aef1-48df-be5c-d36902fe21b6" (UID: "130a3c03-aef1-48df-be5c-d36902fe21b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.167613 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f9fg\" (UniqueName: \"kubernetes.io/projected/130a3c03-aef1-48df-be5c-d36902fe21b6-kube-api-access-7f9fg\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.167651 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.167665 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/130a3c03-aef1-48df-be5c-d36902fe21b6-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.167677 4879 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/130a3c03-aef1-48df-be5c-d36902fe21b6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.167689 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/130a3c03-aef1-48df-be5c-d36902fe21b6-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.428484 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78f78d4bdf-qsbw9" event={"ID":"130a3c03-aef1-48df-be5c-d36902fe21b6","Type":"ContainerDied","Data":"ee2024b5ce471c904f143e921fc03a12b4c21249200fcb71a63be6c56975d822"} Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.428541 4879 scope.go:117] "RemoveContainer" containerID="b0d23bb60475d8f3dca2ba9205760b527c3961549670b3cafc941b33fd090ac6" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.428741 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78f78d4bdf-qsbw9" Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.492493 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.506961 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-78f78d4bdf-qsbw9"] Nov 25 16:11:22 crc kubenswrapper[4879]: I1125 16:11:22.637986 4879 scope.go:117] "RemoveContainer" containerID="79a65d26ff192841236f5aebed0585def513dec41f63a9767adf52e0d83e6221" Nov 25 16:11:23 crc kubenswrapper[4879]: I1125 16:11:23.659642 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" path="/var/lib/kubelet/pods/130a3c03-aef1-48df-be5c-d36902fe21b6/volumes" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.345262 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj"] Nov 25 16:11:28 crc kubenswrapper[4879]: E1125 16:11:28.353678 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="extract" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.353714 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="extract" Nov 25 16:11:28 crc kubenswrapper[4879]: E1125 16:11:28.353737 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="pull" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.353743 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="pull" Nov 25 16:11:28 crc kubenswrapper[4879]: E1125 16:11:28.353758 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon-log" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.353767 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon-log" Nov 25 16:11:28 crc kubenswrapper[4879]: E1125 16:11:28.353793 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.353799 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" Nov 25 16:11:28 crc kubenswrapper[4879]: E1125 16:11:28.353834 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="util" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.353841 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="util" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.354174 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.354199 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="130a3c03-aef1-48df-be5c-d36902fe21b6" containerName="horizon-log" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.354212 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="320cbc1f-7c52-4487-80f2-3397d3abdb86" containerName="extract" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.354990 
4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.359835 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-95db9" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.360107 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.360326 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.389436 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.406730 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.409066 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.415440 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-5jr8c" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.416020 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.421075 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.423051 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.472105 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.483286 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.497066 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl6z7\" (UniqueName: \"kubernetes.io/projected/8228bbf9-6bfa-4f8c-a327-7e94cd658cfe-kube-api-access-gl6z7\") pod \"obo-prometheus-operator-668cf9dfbb-sgbxj\" (UID: \"8228bbf9-6bfa-4f8c-a327-7e94cd658cfe\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.595134 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-8dgzm"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.601456 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.601508 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.601571 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.601661 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl6z7\" (UniqueName: \"kubernetes.io/projected/8228bbf9-6bfa-4f8c-a327-7e94cd658cfe-kube-api-access-gl6z7\") pod \"obo-prometheus-operator-668cf9dfbb-sgbxj\" (UID: \"8228bbf9-6bfa-4f8c-a327-7e94cd658cfe\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.601725 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.623025 
4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.625827 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-qcnrd" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.625973 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.628734 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl6z7\" (UniqueName: \"kubernetes.io/projected/8228bbf9-6bfa-4f8c-a327-7e94cd658cfe-kube-api-access-gl6z7\") pod \"obo-prometheus-operator-668cf9dfbb-sgbxj\" (UID: \"8228bbf9-6bfa-4f8c-a327-7e94cd658cfe\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.635185 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-8dgzm"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.685565 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.706492 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.706554 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.706629 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.706778 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.713731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.713826 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.713869 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2af2c70b-ddca-4b26-997e-b8e1cb054796-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz\" (UID: \"2af2c70b-ddca-4b26-997e-b8e1cb054796\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.716511 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ad6ade56-ff61-497e-944c-df0d2e9519e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v\" (UID: \"ad6ade56-ff61-497e-944c-df0d2e9519e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.732468 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.758854 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.779633 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7v627"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.781107 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.784539 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-8w8q6" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.797017 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7v627"] Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.810041 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.810109 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95pgh\" (UniqueName: \"kubernetes.io/projected/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-kube-api-access-95pgh\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.912247 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.912300 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95pgh\" (UniqueName: \"kubernetes.io/projected/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-kube-api-access-95pgh\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.912395 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc239cf3-61a6-4f20-9f66-a40e68fedd66-openshift-service-ca\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.912505 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhvwv\" (UniqueName: \"kubernetes.io/projected/cc239cf3-61a6-4f20-9f66-a40e68fedd66-kube-api-access-nhvwv\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.916542 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 
16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.947282 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95pgh\" (UniqueName: \"kubernetes.io/projected/98180543-60cf-4cb7-bfc2-6ae3ce782dc1-kube-api-access-95pgh\") pod \"observability-operator-d8bb48f5d-8dgzm\" (UID: \"98180543-60cf-4cb7-bfc2-6ae3ce782dc1\") " pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:28 crc kubenswrapper[4879]: I1125 16:11:28.985938 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.014168 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc239cf3-61a6-4f20-9f66-a40e68fedd66-openshift-service-ca\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.014278 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhvwv\" (UniqueName: \"kubernetes.io/projected/cc239cf3-61a6-4f20-9f66-a40e68fedd66-kube-api-access-nhvwv\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.015783 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc239cf3-61a6-4f20-9f66-a40e68fedd66-openshift-service-ca\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.035053 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhvwv\" (UniqueName: \"kubernetes.io/projected/cc239cf3-61a6-4f20-9f66-a40e68fedd66-kube-api-access-nhvwv\") pod \"perses-operator-5446b9c989-7v627\" (UID: \"cc239cf3-61a6-4f20-9f66-a40e68fedd66\") " pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.216061 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.689430 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj"] Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.748956 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v"] Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.799399 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz"] Nov 25 16:11:29 crc kubenswrapper[4879]: W1125 16:11:29.805447 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98180543_60cf_4cb7_bfc2_6ae3ce782dc1.slice/crio-82fdaee6201b0858a8581f2808fd3d0167161153a6b5ccbd2039dfa22f741f97 WatchSource:0}: Error finding container 82fdaee6201b0858a8581f2808fd3d0167161153a6b5ccbd2039dfa22f741f97: Status 404 returned error can't find the container with id 82fdaee6201b0858a8581f2808fd3d0167161153a6b5ccbd2039dfa22f741f97 Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.810429 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-8dgzm"] Nov 25 16:11:29 crc kubenswrapper[4879]: I1125 16:11:29.975924 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7v627"] Nov 25 16:11:29 crc kubenswrapper[4879]: W1125 16:11:29.979453 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc239cf3_61a6_4f20_9f66_a40e68fedd66.slice/crio-a977ea76c3b639f738ee7e175820b935b8424f12190d6c6ef4d0df5c1430f212 WatchSource:0}: Error finding container a977ea76c3b639f738ee7e175820b935b8424f12190d6c6ef4d0df5c1430f212: Status 404 returned error can't find the container with id a977ea76c3b639f738ee7e175820b935b8424f12190d6c6ef4d0df5c1430f212 Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.052667 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e580-account-create-t2scz"] Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.068622 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tskj4"] Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.078375 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e580-account-create-t2scz"] Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.087342 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tskj4"] Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.528390 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7v627" event={"ID":"cc239cf3-61a6-4f20-9f66-a40e68fedd66","Type":"ContainerStarted","Data":"a977ea76c3b639f738ee7e175820b935b8424f12190d6c6ef4d0df5c1430f212"} Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.538886 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" event={"ID":"8228bbf9-6bfa-4f8c-a327-7e94cd658cfe","Type":"ContainerStarted","Data":"29ec0324c5d0d225c0f59e9d05799dfdfff0aca7d783ab739f5afa563ab336c6"} Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.556476 4879 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" event={"ID":"2af2c70b-ddca-4b26-997e-b8e1cb054796","Type":"ContainerStarted","Data":"38f67dff016bffda26a80a15f96ef22a35ccc792c9b79b008225d971faccfa12"} Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.574550 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" event={"ID":"ad6ade56-ff61-497e-944c-df0d2e9519e2","Type":"ContainerStarted","Data":"87e100692e532cd2a3d5f7185a9c5da73586395b6533a6a66509d924f225fb48"} Nov 25 16:11:30 crc kubenswrapper[4879]: I1125 16:11:30.578177 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" event={"ID":"98180543-60cf-4cb7-bfc2-6ae3ce782dc1","Type":"ContainerStarted","Data":"82fdaee6201b0858a8581f2808fd3d0167161153a6b5ccbd2039dfa22f741f97"} Nov 25 16:11:31 crc kubenswrapper[4879]: I1125 16:11:31.669399 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1189794a-0f7c-44fa-8741-1f6c95c642c3" path="/var/lib/kubelet/pods/1189794a-0f7c-44fa-8741-1f6c95c642c3/volumes" Nov 25 16:11:31 crc kubenswrapper[4879]: I1125 16:11:31.670741 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a71a42f-4b21-49d2-b873-c95a576ddcab" path="/var/lib/kubelet/pods/2a71a42f-4b21-49d2-b873-c95a576ddcab/volumes" Nov 25 16:11:36 crc kubenswrapper[4879]: I1125 16:11:36.071942 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zw6jh"] Nov 25 16:11:36 crc kubenswrapper[4879]: I1125 16:11:36.093406 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zw6jh"] Nov 25 16:11:37 crc kubenswrapper[4879]: I1125 16:11:37.659239 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b8f43aa-7cc1-4a2c-9e64-292a98bc5455" path="/var/lib/kubelet/pods/7b8f43aa-7cc1-4a2c-9e64-292a98bc5455/volumes" Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.706263 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" event={"ID":"ad6ade56-ff61-497e-944c-df0d2e9519e2","Type":"ContainerStarted","Data":"1bf2c07e4384086fe75d19deac57a807b4c044241652f9dff2eba3a93a5a1fd2"} Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.714224 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7v627" event={"ID":"cc239cf3-61a6-4f20-9f66-a40e68fedd66","Type":"ContainerStarted","Data":"c6e58753766dae7335fe397b5eca2f87638dac455c778fbbd3c566173e125034"} Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.714600 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.717751 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" event={"ID":"8228bbf9-6bfa-4f8c-a327-7e94cd658cfe","Type":"ContainerStarted","Data":"aad095644319e649a2ce48916fc6a7248951c0965fd0c266f56623a885a12379"} Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.720048 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" 
event={"ID":"2af2c70b-ddca-4b26-997e-b8e1cb054796","Type":"ContainerStarted","Data":"d1eb46d3017d8608398505056d007e64c7d32c7ef99c58774f8ccd65ef8c2bb5"} Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.777457 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-sgbxj" podStartSLOduration=2.862728407 podStartE2EDuration="11.777436835s" podCreationTimestamp="2025-11-25 16:11:28 +0000 UTC" firstStartedPulling="2025-11-25 16:11:29.70206809 +0000 UTC m=+6381.305481161" lastFinishedPulling="2025-11-25 16:11:38.616776518 +0000 UTC m=+6390.220189589" observedRunningTime="2025-11-25 16:11:39.768240315 +0000 UTC m=+6391.371653386" watchObservedRunningTime="2025-11-25 16:11:39.777436835 +0000 UTC m=+6391.380849906" Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.790631 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz" podStartSLOduration=2.992364993 podStartE2EDuration="11.790615012s" podCreationTimestamp="2025-11-25 16:11:28 +0000 UTC" firstStartedPulling="2025-11-25 16:11:29.812298231 +0000 UTC m=+6381.415711292" lastFinishedPulling="2025-11-25 16:11:38.61054824 +0000 UTC m=+6390.213961311" observedRunningTime="2025-11-25 16:11:39.787031816 +0000 UTC m=+6391.390444887" watchObservedRunningTime="2025-11-25 16:11:39.790615012 +0000 UTC m=+6391.394028083" Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.840899 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-7v627" podStartSLOduration=3.203887914 podStartE2EDuration="11.840877486s" podCreationTimestamp="2025-11-25 16:11:28 +0000 UTC" firstStartedPulling="2025-11-25 16:11:29.982067118 +0000 UTC m=+6381.585480189" lastFinishedPulling="2025-11-25 16:11:38.61905669 +0000 UTC m=+6390.222469761" observedRunningTime="2025-11-25 16:11:39.805108096 +0000 UTC m=+6391.408521167" watchObservedRunningTime="2025-11-25 16:11:39.840877486 +0000 UTC m=+6391.444290547" Nov 25 16:11:39 crc kubenswrapper[4879]: I1125 16:11:39.853059 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v" podStartSLOduration=3.044601541 podStartE2EDuration="11.853031986s" podCreationTimestamp="2025-11-25 16:11:28 +0000 UTC" firstStartedPulling="2025-11-25 16:11:29.802577227 +0000 UTC m=+6381.405990308" lastFinishedPulling="2025-11-25 16:11:38.611007682 +0000 UTC m=+6390.214420753" observedRunningTime="2025-11-25 16:11:39.821416258 +0000 UTC m=+6391.424829329" watchObservedRunningTime="2025-11-25 16:11:39.853031986 +0000 UTC m=+6391.456445057" Nov 25 16:11:40 crc kubenswrapper[4879]: I1125 16:11:40.733347 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" event={"ID":"98180543-60cf-4cb7-bfc2-6ae3ce782dc1","Type":"ContainerStarted","Data":"203c4c1b6c57326d687dc482776acd1cf5a862e29ad37ff3b712ed12c5d38eca"} Nov 25 16:11:40 crc kubenswrapper[4879]: I1125 16:11:40.764533 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" podStartSLOduration=2.959600835 podStartE2EDuration="12.764509141s" podCreationTimestamp="2025-11-25 16:11:28 +0000 UTC" firstStartedPulling="2025-11-25 16:11:29.81519876 +0000 UTC m=+6381.418611831" lastFinishedPulling="2025-11-25 
16:11:39.620107066 +0000 UTC m=+6391.223520137" observedRunningTime="2025-11-25 16:11:40.753102992 +0000 UTC m=+6392.356516083" watchObservedRunningTime="2025-11-25 16:11:40.764509141 +0000 UTC m=+6392.367922222" Nov 25 16:11:41 crc kubenswrapper[4879]: I1125 16:11:41.746865 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:41 crc kubenswrapper[4879]: I1125 16:11:41.750496 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-8dgzm" Nov 25 16:11:47 crc kubenswrapper[4879]: I1125 16:11:47.409249 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:11:47 crc kubenswrapper[4879]: I1125 16:11:47.409839 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:11:49 crc kubenswrapper[4879]: I1125 16:11:49.218986 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-7v627" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.159440 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.160187 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" containerName="openstackclient" containerID="cri-o://5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18" gracePeriod=2 Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.170674 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.236331 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 25 16:11:52 crc kubenswrapper[4879]: E1125 16:11:52.236845 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" containerName="openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.236874 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" containerName="openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.237158 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" containerName="openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.238005 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.268472 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.286656 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" podUID="08153598-9efb-4dd7-8189-7c1cfbd76505" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.337266 4879 scope.go:117] "RemoveContainer" containerID="d7d351252d44d94cbbe1d53ffe2a880f4c549c1f87a300e3870866a6739a2e12" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.349233 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fv7s\" (UniqueName: \"kubernetes.io/projected/08153598-9efb-4dd7-8189-7c1cfbd76505-kube-api-access-9fv7s\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.349288 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.349366 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config-secret\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.397062 4879 scope.go:117] "RemoveContainer" containerID="4f59867611e79372bf1b946a40bacea60d1c8317da436acf349170856377503f" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.424698 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.426177 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.430443 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-8z6gf" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.444865 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.451457 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fv7s\" (UniqueName: \"kubernetes.io/projected/08153598-9efb-4dd7-8189-7c1cfbd76505-kube-api-access-9fv7s\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.451551 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.451636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config-secret\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.451700 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb6h4\" (UniqueName: \"kubernetes.io/projected/7e4afeb0-b288-4960-905b-2cac22f0df5c-kube-api-access-hb6h4\") pod \"kube-state-metrics-0\" (UID: \"7e4afeb0-b288-4960-905b-2cac22f0df5c\") " pod="openstack/kube-state-metrics-0" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.455578 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.477067 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/08153598-9efb-4dd7-8189-7c1cfbd76505-openstack-config-secret\") pod \"openstackclient\" (UID: \"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.511432 4879 scope.go:117] "RemoveContainer" containerID="26cd97ce9d7ee86b17051d1bf10c7e31999b15085f6304e4c01d5174a2d5a309" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.555510 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb6h4\" (UniqueName: \"kubernetes.io/projected/7e4afeb0-b288-4960-905b-2cac22f0df5c-kube-api-access-hb6h4\") pod \"kube-state-metrics-0\" (UID: \"7e4afeb0-b288-4960-905b-2cac22f0df5c\") " pod="openstack/kube-state-metrics-0" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.588476 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fv7s\" (UniqueName: \"kubernetes.io/projected/08153598-9efb-4dd7-8189-7c1cfbd76505-kube-api-access-9fv7s\") pod \"openstackclient\" (UID: 
\"08153598-9efb-4dd7-8189-7c1cfbd76505\") " pod="openstack/openstackclient" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.608199 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb6h4\" (UniqueName: \"kubernetes.io/projected/7e4afeb0-b288-4960-905b-2cac22f0df5c-kube-api-access-hb6h4\") pod \"kube-state-metrics-0\" (UID: \"7e4afeb0-b288-4960-905b-2cac22f0df5c\") " pod="openstack/kube-state-metrics-0" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.671718 4879 scope.go:117] "RemoveContainer" containerID="c567daa090999a7a99210876f239137acc0f6558c86a8dcabde011c379ffd274" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.840624 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 25 16:11:52 crc kubenswrapper[4879]: I1125 16:11:52.858915 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.234147 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.236540 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.240707 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-ldzzv" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.240981 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.241085 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.241222 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.242051 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289390 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpcb5\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-kube-api-access-hpcb5\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289458 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289490 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289551 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289590 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289613 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.289645 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398247 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398610 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398670 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398742 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpcb5\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-kube-api-access-hpcb5\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " 
pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398791 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.398825 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.401153 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.413706 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/facb862c-2078-49c9-ab83-5cab4bcaee93-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.421788 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.438101 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.467747 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/facb862c-2078-49c9-ab83-5cab4bcaee93-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.468139 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.468798 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpcb5\" (UniqueName: \"kubernetes.io/projected/facb862c-2078-49c9-ab83-5cab4bcaee93-kube-api-access-hpcb5\") pod \"alertmanager-metric-storage-0\" (UID: \"facb862c-2078-49c9-ab83-5cab4bcaee93\") " pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: 
I1125 16:11:53.568384 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.595985 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.783228 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.795652 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.799807 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.803896 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.804220 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.804361 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.804412 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-w5s78" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.804483 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.805418 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.932427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3c36d2d1-d726-43e1-b145-42f753a0ef41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.932491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3c36d2d1-d726-43e1-b145-42f753a0ef41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.932541 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pmn5\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-kube-api-access-6pmn5\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.932706 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: 
\"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.932754 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.933105 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.933243 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:53 crc kubenswrapper[4879]: I1125 16:11:53.933327 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.037536 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.037911 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.037956 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.037987 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3c36d2d1-d726-43e1-b145-42f753a0ef41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.038030 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: 
\"kubernetes.io/empty-dir/3c36d2d1-d726-43e1-b145-42f753a0ef41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.038089 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pmn5\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-kube-api-access-6pmn5\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.038144 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.038175 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.039277 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3c36d2d1-d726-43e1-b145-42f753a0ef41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.058207 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.059261 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.064280 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.068499 4879 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.068571 4879 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bcd5c4e5af20c8ad847f3633fafb7852095e021f10deda9e365edc03eaf20b9c/globalmount\"" pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.069415 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3c36d2d1-d726-43e1-b145-42f753a0ef41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.070859 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.086842 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pmn5\" (UniqueName: \"kubernetes.io/projected/3c36d2d1-d726-43e1-b145-42f753a0ef41-kube-api-access-6pmn5\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.089607 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3c36d2d1-d726-43e1-b145-42f753a0ef41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.187504 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-77e46543-31f4-4275-a6e2-368e4ec368a4\") pod \"prometheus-metric-storage-0\" (UID: \"3c36d2d1-d726-43e1-b145-42f753a0ef41\") " pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.397988 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.437979 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.524875 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.687731 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.762019 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret\") pod \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.762232 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config\") pod \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.762447 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5jtm\" (UniqueName: \"kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm\") pod \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\" (UID: \"4dc81ff1-2201-4b21-8b7c-74940b4a8b24\") " Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.979937 4879 generic.go:334] "Generic (PLEG): container finished" podID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" containerID="5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18" exitCode=137 Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.980001 4879 scope.go:117] "RemoveContainer" containerID="5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.980011 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.981586 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7e4afeb0-b288-4960-905b-2cac22f0df5c","Type":"ContainerStarted","Data":"d39d5e3b24209692cd8f68da146e63dc55d1c0c7cd5022b8e8fd306b7f3e7790"} Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.982535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"facb862c-2078-49c9-ab83-5cab4bcaee93","Type":"ContainerStarted","Data":"ac65a5608c183de9013499b30c93747156c7f6e11f36125b4bc0b1af3e5d6cb7"} Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.983488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08153598-9efb-4dd7-8189-7c1cfbd76505","Type":"ContainerStarted","Data":"e11ab38c9178033bf8df943d4fe642c5339d2df6cb3409dedd1e1d5cbb899a84"} Nov 25 16:11:54 crc kubenswrapper[4879]: I1125 16:11:54.984150 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm" (OuterVolumeSpecName: "kube-api-access-c5jtm") pod "4dc81ff1-2201-4b21-8b7c-74940b4a8b24" (UID: "4dc81ff1-2201-4b21-8b7c-74940b4a8b24"). InnerVolumeSpecName "kube-api-access-c5jtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.009523 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "4dc81ff1-2201-4b21-8b7c-74940b4a8b24" (UID: "4dc81ff1-2201-4b21-8b7c-74940b4a8b24"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.044257 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "4dc81ff1-2201-4b21-8b7c-74940b4a8b24" (UID: "4dc81ff1-2201-4b21-8b7c-74940b4a8b24"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.069646 4879 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.069682 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5jtm\" (UniqueName: \"kubernetes.io/projected/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-kube-api-access-c5jtm\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.069691 4879 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4dc81ff1-2201-4b21-8b7c-74940b4a8b24-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.085302 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.202828 4879 scope.go:117] "RemoveContainer" containerID="5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18" Nov 25 16:11:55 crc kubenswrapper[4879]: E1125 16:11:55.203355 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18\": container with ID starting with 5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18 not found: ID does not exist" containerID="5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.203399 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18"} err="failed to get container status \"5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18\": rpc error: code = NotFound desc = could not find container \"5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18\": container with ID starting with 5253b60648477fbca59f847ef5003c8cd5e563938c28de6ac709872039d8ce18 not found: ID does not exist" Nov 25 16:11:55 crc kubenswrapper[4879]: W1125 16:11:55.208060 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c36d2d1_d726_43e1_b145_42f753a0ef41.slice/crio-fe0c3251ac95d53d92b49405484e6c8a0f5830cf740ccf380e2264ff682fd9a5 WatchSource:0}: Error finding container fe0c3251ac95d53d92b49405484e6c8a0f5830cf740ccf380e2264ff682fd9a5: Status 404 returned error can't find the container with id fe0c3251ac95d53d92b49405484e6c8a0f5830cf740ccf380e2264ff682fd9a5 Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.297944 4879 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" 
podUID="08153598-9efb-4dd7-8189-7c1cfbd76505" Nov 25 16:11:55 crc kubenswrapper[4879]: I1125 16:11:55.659396 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc81ff1-2201-4b21-8b7c-74940b4a8b24" path="/var/lib/kubelet/pods/4dc81ff1-2201-4b21-8b7c-74940b4a8b24/volumes" Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.003889 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"08153598-9efb-4dd7-8189-7c1cfbd76505","Type":"ContainerStarted","Data":"6ea387b36ca821ab6ace246d5d4fbd23fb8a53f8d89af1662f740f529db7d210"} Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.011060 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerStarted","Data":"fe0c3251ac95d53d92b49405484e6c8a0f5830cf740ccf380e2264ff682fd9a5"} Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.015421 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7e4afeb0-b288-4960-905b-2cac22f0df5c","Type":"ContainerStarted","Data":"0ebf04b9ade5c0f68bc634209321da44a7efe8bba9e83c7d5d4f5248976df85a"} Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.015557 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.037177 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=4.037157426 podStartE2EDuration="4.037157426s" podCreationTimestamp="2025-11-25 16:11:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:11:56.030673399 +0000 UTC m=+6407.634086470" watchObservedRunningTime="2025-11-25 16:11:56.037157426 +0000 UTC m=+6407.640570507" Nov 25 16:11:56 crc kubenswrapper[4879]: I1125 16:11:56.048966 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.643339921 podStartE2EDuration="4.048950485s" podCreationTimestamp="2025-11-25 16:11:52 +0000 UTC" firstStartedPulling="2025-11-25 16:11:54.134979196 +0000 UTC m=+6405.738392267" lastFinishedPulling="2025-11-25 16:11:55.54058976 +0000 UTC m=+6407.144002831" observedRunningTime="2025-11-25 16:11:56.046624003 +0000 UTC m=+6407.650037074" watchObservedRunningTime="2025-11-25 16:11:56.048950485 +0000 UTC m=+6407.652363556" Nov 25 16:12:02 crc kubenswrapper[4879]: I1125 16:12:02.844837 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 25 16:12:03 crc kubenswrapper[4879]: I1125 16:12:03.092747 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"facb862c-2078-49c9-ab83-5cab4bcaee93","Type":"ContainerStarted","Data":"2b1b0f17c0285d6c9a4fcb5b50d6cf8200c848d29ebf22932fc0e219df14f788"} Nov 25 16:12:03 crc kubenswrapper[4879]: I1125 16:12:03.094362 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerStarted","Data":"c7ffc20fbc2252eef3f566a5fab9f66105669b5d45050f1114dd3f9fc78cfce6"} Nov 25 16:12:10 crc kubenswrapper[4879]: I1125 16:12:10.165910 4879 generic.go:334] "Generic (PLEG): container finished" podID="3c36d2d1-d726-43e1-b145-42f753a0ef41" 
containerID="c7ffc20fbc2252eef3f566a5fab9f66105669b5d45050f1114dd3f9fc78cfce6" exitCode=0 Nov 25 16:12:10 crc kubenswrapper[4879]: I1125 16:12:10.166105 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerDied","Data":"c7ffc20fbc2252eef3f566a5fab9f66105669b5d45050f1114dd3f9fc78cfce6"} Nov 25 16:12:10 crc kubenswrapper[4879]: I1125 16:12:10.168713 4879 generic.go:334] "Generic (PLEG): container finished" podID="facb862c-2078-49c9-ab83-5cab4bcaee93" containerID="2b1b0f17c0285d6c9a4fcb5b50d6cf8200c848d29ebf22932fc0e219df14f788" exitCode=0 Nov 25 16:12:10 crc kubenswrapper[4879]: I1125 16:12:10.168807 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"facb862c-2078-49c9-ab83-5cab4bcaee93","Type":"ContainerDied","Data":"2b1b0f17c0285d6c9a4fcb5b50d6cf8200c848d29ebf22932fc0e219df14f788"} Nov 25 16:12:17 crc kubenswrapper[4879]: I1125 16:12:17.408696 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:12:17 crc kubenswrapper[4879]: I1125 16:12:17.409210 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:12:19 crc kubenswrapper[4879]: I1125 16:12:19.255574 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerStarted","Data":"0a9a9995f194b8ada7897fd5b48c97406cf10c39627d826a11b1db88daa97d77"} Nov 25 16:12:19 crc kubenswrapper[4879]: I1125 16:12:19.257714 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"facb862c-2078-49c9-ab83-5cab4bcaee93","Type":"ContainerStarted","Data":"b15394f8ac291e4349090fc713f31e6efc6e5ebdc6578138e612092a7933696f"} Nov 25 16:12:22 crc kubenswrapper[4879]: I1125 16:12:22.287897 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"facb862c-2078-49c9-ab83-5cab4bcaee93","Type":"ContainerStarted","Data":"4c8ed4ed1dc790d1d78e8f1b9b4a47951c92ef7a6da8a39104946c26e1c5b459"} Nov 25 16:12:22 crc kubenswrapper[4879]: I1125 16:12:22.288590 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Nov 25 16:12:22 crc kubenswrapper[4879]: I1125 16:12:22.291589 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Nov 25 16:12:22 crc kubenswrapper[4879]: I1125 16:12:22.326222 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.698858367 podStartE2EDuration="29.32620123s" podCreationTimestamp="2025-11-25 16:11:53 +0000 UTC" firstStartedPulling="2025-11-25 16:11:54.54008276 +0000 UTC m=+6406.143495841" lastFinishedPulling="2025-11-25 16:12:18.167425613 +0000 UTC m=+6429.770838704" observedRunningTime="2025-11-25 16:12:22.316146587 +0000 UTC 
m=+6433.919559668" watchObservedRunningTime="2025-11-25 16:12:22.32620123 +0000 UTC m=+6433.929614291" Nov 25 16:12:23 crc kubenswrapper[4879]: I1125 16:12:23.301261 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerStarted","Data":"f51e27edaed6372275b6e4e3f455a5d48077f36feaa61deae09930f3bcdc21c6"} Nov 25 16:12:28 crc kubenswrapper[4879]: I1125 16:12:28.352464 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3c36d2d1-d726-43e1-b145-42f753a0ef41","Type":"ContainerStarted","Data":"00005d90821755e2a2edb242152ff1b761312953d096c0eaee1553774acc089e"} Nov 25 16:12:28 crc kubenswrapper[4879]: I1125 16:12:28.381836 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.225529658 podStartE2EDuration="36.381817451s" podCreationTimestamp="2025-11-25 16:11:52 +0000 UTC" firstStartedPulling="2025-11-25 16:11:55.210642517 +0000 UTC m=+6406.814055578" lastFinishedPulling="2025-11-25 16:12:27.3669303 +0000 UTC m=+6438.970343371" observedRunningTime="2025-11-25 16:12:28.375826639 +0000 UTC m=+6439.979239720" watchObservedRunningTime="2025-11-25 16:12:28.381817451 +0000 UTC m=+6439.985230522" Nov 25 16:12:29 crc kubenswrapper[4879]: I1125 16:12:29.438214 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.787451 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.791603 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.795644 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.796956 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.803740 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966069 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966425 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966531 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966649 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966832 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79smx\" (UniqueName: \"kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.966927 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:31 crc kubenswrapper[4879]: I1125 16:12:31.967027 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.070254 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.069610 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.070339 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071218 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071394 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79smx\" (UniqueName: \"kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071517 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.071991 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.083753 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.084147 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.086581 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.089506 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.092805 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79smx\" (UniqueName: \"kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx\") pod \"ceilometer-0\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") " pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.117643 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:12:32 crc kubenswrapper[4879]: I1125 16:12:32.676081 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:12:32 crc kubenswrapper[4879]: W1125 16:12:32.677970 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a6d7ce3_9e3a_4bb3_8540_83fa67fe180d.slice/crio-4d01982106dae23e97897b4f779daa7c3dfa8e2831ee132de9821614eee49930 WatchSource:0}: Error finding container 4d01982106dae23e97897b4f779daa7c3dfa8e2831ee132de9821614eee49930: Status 404 returned error can't find the container with id 4d01982106dae23e97897b4f779daa7c3dfa8e2831ee132de9821614eee49930 Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.130185 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-ab01-account-create-7c2ms"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.142504 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-ab01-account-create-7c2ms"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.154402 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-786c-account-create-vxgsn"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.166653 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-f52rd"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.178211 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d412-account-create-llfmj"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.190347 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-786c-account-create-vxgsn"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.201278 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-r2pkk"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.213004 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-f52rd"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.223545 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d412-account-create-llfmj"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.234402 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-gbd6q"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.247068 4879 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-r2pkk"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.258113 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-gbd6q"] Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.401465 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerStarted","Data":"4d01982106dae23e97897b4f779daa7c3dfa8e2831ee132de9821614eee49930"} Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.666769 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19be5c82-30a2-4ec4-8d8d-a087ad293e27" path="/var/lib/kubelet/pods/19be5c82-30a2-4ec4-8d8d-a087ad293e27/volumes" Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.667961 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c60211d-f049-43ab-b32d-ea63f4bc8b7d" path="/var/lib/kubelet/pods/3c60211d-f049-43ab-b32d-ea63f4bc8b7d/volumes" Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.668564 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4754f5d0-b42f-43e6-987d-f2b28dba3afd" path="/var/lib/kubelet/pods/4754f5d0-b42f-43e6-987d-f2b28dba3afd/volumes" Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.669165 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c525edb-f49c-4350-a3df-c487163f3d31" path="/var/lib/kubelet/pods/9c525edb-f49c-4350-a3df-c487163f3d31/volumes" Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.672574 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2965a20-990b-495d-bfc0-97948383e941" path="/var/lib/kubelet/pods/a2965a20-990b-495d-bfc0-97948383e941/volumes" Nov 25 16:12:33 crc kubenswrapper[4879]: I1125 16:12:33.688650 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb7b8ca6-8b45-432a-8b46-83efaf30fe09" path="/var/lib/kubelet/pods/bb7b8ca6-8b45-432a-8b46-83efaf30fe09/volumes" Nov 25 16:12:34 crc kubenswrapper[4879]: I1125 16:12:34.414263 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerStarted","Data":"fb36307d1491d507403b26aac7162a327162e3c927f92f71f034e398e8f990fb"} Nov 25 16:12:35 crc kubenswrapper[4879]: I1125 16:12:35.426512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerStarted","Data":"c7a46e814c3ca8f5f600ad8e39f09ee703879e797fd7f9ea3131b963927979b2"} Nov 25 16:12:36 crc kubenswrapper[4879]: I1125 16:12:36.438984 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerStarted","Data":"2b213da7a25f122876484f19b9645b043862bac9d25b1bd3b796c694a319bd95"} Nov 25 16:12:37 crc kubenswrapper[4879]: I1125 16:12:37.452286 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerStarted","Data":"21e9053abbb916dfd13d0773e94e026ee4464ef327c75dace8bb53e349e763c1"} Nov 25 16:12:37 crc kubenswrapper[4879]: I1125 16:12:37.452921 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 16:12:37 crc kubenswrapper[4879]: I1125 16:12:37.492175 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ceilometer-0" podStartSLOduration=2.237706995 podStartE2EDuration="6.492150467s" podCreationTimestamp="2025-11-25 16:12:31 +0000 UTC" firstStartedPulling="2025-11-25 16:12:32.680820513 +0000 UTC m=+6444.284233584" lastFinishedPulling="2025-11-25 16:12:36.935263985 +0000 UTC m=+6448.538677056" observedRunningTime="2025-11-25 16:12:37.482354611 +0000 UTC m=+6449.085767682" watchObservedRunningTime="2025-11-25 16:12:37.492150467 +0000 UTC m=+6449.095563538" Nov 25 16:12:39 crc kubenswrapper[4879]: I1125 16:12:39.438307 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Nov 25 16:12:39 crc kubenswrapper[4879]: I1125 16:12:39.442763 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Nov 25 16:12:39 crc kubenswrapper[4879]: I1125 16:12:39.473735 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.041737 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k8bkz"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.059376 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-k8bkz"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.509197 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-dq494"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.511365 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.528473 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-dq494"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.632868 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts\") pod \"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.633385 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xcxb\" (UniqueName: \"kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb\") pod \"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.658589 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="083a4b41-2786-4285-a0db-b16ad4840857" path="/var/lib/kubelet/pods/083a4b41-2786-4285-a0db-b16ad4840857/volumes" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.735414 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts\") pod \"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.735570 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xcxb\" (UniqueName: \"kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb\") pod 
\"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.736333 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts\") pod \"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.738583 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-5cc7-account-create-w7nlh"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.740347 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.742472 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.749104 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5cc7-account-create-w7nlh"] Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.777863 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xcxb\" (UniqueName: \"kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb\") pod \"aodh-db-create-dq494\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.837654 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.838278 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6p69\" (UniqueName: \"kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.838692 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-dq494" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.941298 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6p69\" (UniqueName: \"kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.941721 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.942590 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:43 crc kubenswrapper[4879]: I1125 16:12:43.968178 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6p69\" (UniqueName: \"kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69\") pod \"aodh-5cc7-account-create-w7nlh\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.058502 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.175137 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-dq494"] Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.555066 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-dq494" event={"ID":"11d2ce32-3fdf-486f-86ce-6dd86a0e272b","Type":"ContainerStarted","Data":"e8a28495706129a48858bfacb91eaa98701fe0518462e9f0c382385113c87af6"} Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.555460 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-dq494" event={"ID":"11d2ce32-3fdf-486f-86ce-6dd86a0e272b","Type":"ContainerStarted","Data":"bb94fade0f58845973a9bc7de20d0096853dba4fc5d52030b9bffceba28d9eac"} Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.586192 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-create-dq494" podStartSLOduration=1.586171277 podStartE2EDuration="1.586171277s" podCreationTimestamp="2025-11-25 16:12:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:12:44.577314617 +0000 UTC m=+6456.180727698" watchObservedRunningTime="2025-11-25 16:12:44.586171277 +0000 UTC m=+6456.189584368" Nov 25 16:12:44 crc kubenswrapper[4879]: I1125 16:12:44.604328 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5cc7-account-create-w7nlh"] Nov 25 16:12:45 crc kubenswrapper[4879]: I1125 16:12:45.567020 4879 generic.go:334] "Generic (PLEG): container finished" podID="eabac680-caa0-462c-9f71-ec0a1ae262de" 
containerID="2c19422b7cd06a9c3b5c5da6cdd4f9951a19cd4f96422694ed0688a596b483a3" exitCode=0 Nov 25 16:12:45 crc kubenswrapper[4879]: I1125 16:12:45.567217 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5cc7-account-create-w7nlh" event={"ID":"eabac680-caa0-462c-9f71-ec0a1ae262de","Type":"ContainerDied","Data":"2c19422b7cd06a9c3b5c5da6cdd4f9951a19cd4f96422694ed0688a596b483a3"} Nov 25 16:12:45 crc kubenswrapper[4879]: I1125 16:12:45.567288 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5cc7-account-create-w7nlh" event={"ID":"eabac680-caa0-462c-9f71-ec0a1ae262de","Type":"ContainerStarted","Data":"b328cfb666e53d29f4894e5049741915dbbf08a3deae04fcced779e013263c50"} Nov 25 16:12:45 crc kubenswrapper[4879]: I1125 16:12:45.569192 4879 generic.go:334] "Generic (PLEG): container finished" podID="11d2ce32-3fdf-486f-86ce-6dd86a0e272b" containerID="e8a28495706129a48858bfacb91eaa98701fe0518462e9f0c382385113c87af6" exitCode=0 Nov 25 16:12:45 crc kubenswrapper[4879]: I1125 16:12:45.569243 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-dq494" event={"ID":"11d2ce32-3fdf-486f-86ce-6dd86a0e272b","Type":"ContainerDied","Data":"e8a28495706129a48858bfacb91eaa98701fe0518462e9f0c382385113c87af6"} Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.074486 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.083801 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-dq494" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231114 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts\") pod \"eabac680-caa0-462c-9f71-ec0a1ae262de\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231297 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6p69\" (UniqueName: \"kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69\") pod \"eabac680-caa0-462c-9f71-ec0a1ae262de\" (UID: \"eabac680-caa0-462c-9f71-ec0a1ae262de\") " Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231344 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts\") pod \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231535 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xcxb\" (UniqueName: \"kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb\") pod \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\" (UID: \"11d2ce32-3fdf-486f-86ce-6dd86a0e272b\") " Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231851 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eabac680-caa0-462c-9f71-ec0a1ae262de" (UID: "eabac680-caa0-462c-9f71-ec0a1ae262de"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.231849 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11d2ce32-3fdf-486f-86ce-6dd86a0e272b" (UID: "11d2ce32-3fdf-486f-86ce-6dd86a0e272b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.232005 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.232023 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eabac680-caa0-462c-9f71-ec0a1ae262de-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.236277 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69" (OuterVolumeSpecName: "kube-api-access-s6p69") pod "eabac680-caa0-462c-9f71-ec0a1ae262de" (UID: "eabac680-caa0-462c-9f71-ec0a1ae262de"). InnerVolumeSpecName "kube-api-access-s6p69". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.236316 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb" (OuterVolumeSpecName: "kube-api-access-6xcxb") pod "11d2ce32-3fdf-486f-86ce-6dd86a0e272b" (UID: "11d2ce32-3fdf-486f-86ce-6dd86a0e272b"). InnerVolumeSpecName "kube-api-access-6xcxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.333960 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6p69\" (UniqueName: \"kubernetes.io/projected/eabac680-caa0-462c-9f71-ec0a1ae262de-kube-api-access-s6p69\") on node \"crc\" DevicePath \"\"" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.334313 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xcxb\" (UniqueName: \"kubernetes.io/projected/11d2ce32-3fdf-486f-86ce-6dd86a0e272b-kube-api-access-6xcxb\") on node \"crc\" DevicePath \"\"" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.408998 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.409068 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.409147 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.409957 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.410021 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" gracePeriod=600 Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.589632 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5cc7-account-create-w7nlh" event={"ID":"eabac680-caa0-462c-9f71-ec0a1ae262de","Type":"ContainerDied","Data":"b328cfb666e53d29f4894e5049741915dbbf08a3deae04fcced779e013263c50"} Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.589671 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b328cfb666e53d29f4894e5049741915dbbf08a3deae04fcced779e013263c50" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.589669 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-5cc7-account-create-w7nlh" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.591513 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-dq494" event={"ID":"11d2ce32-3fdf-486f-86ce-6dd86a0e272b","Type":"ContainerDied","Data":"bb94fade0f58845973a9bc7de20d0096853dba4fc5d52030b9bffceba28d9eac"} Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.591551 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb94fade0f58845973a9bc7de20d0096853dba4fc5d52030b9bffceba28d9eac" Nov 25 16:12:47 crc kubenswrapper[4879]: I1125 16:12:47.591609 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-dq494" Nov 25 16:12:49 crc kubenswrapper[4879]: I1125 16:12:49.611235 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" exitCode=0 Nov 25 16:12:49 crc kubenswrapper[4879]: I1125 16:12:49.611324 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4"} Nov 25 16:12:49 crc kubenswrapper[4879]: I1125 16:12:49.611686 4879 scope.go:117] "RemoveContainer" containerID="b21237490f0aefdd6974be0e787e306742d2154d0ff6faeec34f7e9ec28b48b8" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.054564 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-2jcrb"] Nov 25 16:12:50 crc kubenswrapper[4879]: E1125 16:12:50.055183 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d2ce32-3fdf-486f-86ce-6dd86a0e272b" containerName="mariadb-database-create" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.055204 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d2ce32-3fdf-486f-86ce-6dd86a0e272b" containerName="mariadb-database-create" Nov 25 16:12:50 crc kubenswrapper[4879]: E1125 16:12:50.055227 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eabac680-caa0-462c-9f71-ec0a1ae262de" containerName="mariadb-account-create" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.055233 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eabac680-caa0-462c-9f71-ec0a1ae262de" containerName="mariadb-account-create" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.055497 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d2ce32-3fdf-486f-86ce-6dd86a0e272b" containerName="mariadb-database-create" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.055535 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eabac680-caa0-462c-9f71-ec0a1ae262de" containerName="mariadb-account-create" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.056495 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.059552 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.059852 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-fx8cg" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.060805 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.061793 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.067022 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-2jcrb"] Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.192374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.192481 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5snkv\" (UniqueName: \"kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.192583 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.192627 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.294837 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.294913 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.295354 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 
16:12:50.295415 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5snkv\" (UniqueName: \"kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.301352 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.301697 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.302219 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.312763 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5snkv\" (UniqueName: \"kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv\") pod \"aodh-db-sync-2jcrb\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.382198 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:12:50 crc kubenswrapper[4879]: E1125 16:12:50.453304 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.621046 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:12:50 crc kubenswrapper[4879]: E1125 16:12:50.621715 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:12:50 crc kubenswrapper[4879]: I1125 16:12:50.834648 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-2jcrb"] Nov 25 16:12:50 crc kubenswrapper[4879]: W1125 16:12:50.836364 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2db92ff9_020b_4235_b2bb_74ee897afee5.slice/crio-85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2 WatchSource:0}: Error finding container 85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2: Status 404 returned error can't find the container with id 85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2 Nov 25 16:12:51 crc kubenswrapper[4879]: I1125 16:12:51.631089 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2jcrb" event={"ID":"2db92ff9-020b-4235-b2bb-74ee897afee5","Type":"ContainerStarted","Data":"85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2"} Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.077031 4879 scope.go:117] "RemoveContainer" containerID="fbb7482b2235dae14390286d4fb6362c587b47ad9b54917eec3c6cdfb5e8bb26" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.165087 4879 scope.go:117] "RemoveContainer" containerID="79de520561295a44d129f531fcd4366e23c35c0f00cf3c5990f7f3a32364b17a" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.190876 4879 scope.go:117] "RemoveContainer" containerID="5c2ffe3cb4a82b2e5427b22d6a7185c3989c0c9a173486d8de04ad9fa2daab94" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.269036 4879 scope.go:117] "RemoveContainer" containerID="cf4f3adeea56b6ed9f84d8eb510c8c2f25a4cdc54659d007c8ba24c285ec6d48" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.324449 4879 scope.go:117] "RemoveContainer" containerID="78ffcf73b3b6ef721760359a710f0a610bb88545c896af29596ae1650ecce48d" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.446544 4879 scope.go:117] "RemoveContainer" containerID="93c78a976db9d1a92e7f6eaaaa89213fe6e60ade7190c66ec94822d3308b1444" Nov 25 16:12:53 crc kubenswrapper[4879]: I1125 16:12:53.527250 4879 scope.go:117] "RemoveContainer" containerID="38a081af9f62eacc948fe468b2bbb17d16b7fb024be66cb344948ab42e07eae6" Nov 25 16:12:56 crc kubenswrapper[4879]: I1125 16:12:56.036197 4879 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmphj"] Nov 25 16:12:56 crc kubenswrapper[4879]: I1125 16:12:56.050391 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmphj"] Nov 25 16:12:57 crc kubenswrapper[4879]: I1125 16:12:57.035411 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-mnrtm"] Nov 25 16:12:57 crc kubenswrapper[4879]: I1125 16:12:57.046946 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-mnrtm"] Nov 25 16:12:57 crc kubenswrapper[4879]: I1125 16:12:57.759759 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c54f8d8e-c352-4bb0-b0db-8afbdb6c944d" path="/var/lib/kubelet/pods/c54f8d8e-c352-4bb0-b0db-8afbdb6c944d/volumes" Nov 25 16:12:57 crc kubenswrapper[4879]: I1125 16:12:57.827085 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd000221-dd8b-4a66-9dfd-14d894fe8d37" path="/var/lib/kubelet/pods/cd000221-dd8b-4a66-9dfd-14d894fe8d37/volumes" Nov 25 16:13:01 crc kubenswrapper[4879]: I1125 16:13:01.645252 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:01 crc kubenswrapper[4879]: E1125 16:13:01.645742 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:13:02 crc kubenswrapper[4879]: I1125 16:13:02.131083 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 16:13:02 crc kubenswrapper[4879]: I1125 16:13:02.750587 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2jcrb" event={"ID":"2db92ff9-020b-4235-b2bb-74ee897afee5","Type":"ContainerStarted","Data":"37b1a754bac274e0d8f424bd4d62534521e5d6746bf5fbf4ac6238021ded3805"} Nov 25 16:13:02 crc kubenswrapper[4879]: I1125 16:13:02.785596 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-2jcrb" podStartSLOduration=1.648554715 podStartE2EDuration="12.785577693s" podCreationTimestamp="2025-11-25 16:12:50 +0000 UTC" firstStartedPulling="2025-11-25 16:12:50.839773512 +0000 UTC m=+6462.443186583" lastFinishedPulling="2025-11-25 16:13:01.97679649 +0000 UTC m=+6473.580209561" observedRunningTime="2025-11-25 16:13:02.773852824 +0000 UTC m=+6474.377265905" watchObservedRunningTime="2025-11-25 16:13:02.785577693 +0000 UTC m=+6474.388990764" Nov 25 16:13:06 crc kubenswrapper[4879]: I1125 16:13:06.798536 4879 generic.go:334] "Generic (PLEG): container finished" podID="2db92ff9-020b-4235-b2bb-74ee897afee5" containerID="37b1a754bac274e0d8f424bd4d62534521e5d6746bf5fbf4ac6238021ded3805" exitCode=0 Nov 25 16:13:06 crc kubenswrapper[4879]: I1125 16:13:06.798613 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2jcrb" event={"ID":"2db92ff9-020b-4235-b2bb-74ee897afee5","Type":"ContainerDied","Data":"37b1a754bac274e0d8f424bd4d62534521e5d6746bf5fbf4ac6238021ded3805"} Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.190964 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.289338 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle\") pod \"2db92ff9-020b-4235-b2bb-74ee897afee5\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.289522 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data\") pod \"2db92ff9-020b-4235-b2bb-74ee897afee5\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.289587 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts\") pod \"2db92ff9-020b-4235-b2bb-74ee897afee5\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.289683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5snkv\" (UniqueName: \"kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv\") pod \"2db92ff9-020b-4235-b2bb-74ee897afee5\" (UID: \"2db92ff9-020b-4235-b2bb-74ee897afee5\") " Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.295189 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv" (OuterVolumeSpecName: "kube-api-access-5snkv") pod "2db92ff9-020b-4235-b2bb-74ee897afee5" (UID: "2db92ff9-020b-4235-b2bb-74ee897afee5"). InnerVolumeSpecName "kube-api-access-5snkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.297333 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts" (OuterVolumeSpecName: "scripts") pod "2db92ff9-020b-4235-b2bb-74ee897afee5" (UID: "2db92ff9-020b-4235-b2bb-74ee897afee5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.322587 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2db92ff9-020b-4235-b2bb-74ee897afee5" (UID: "2db92ff9-020b-4235-b2bb-74ee897afee5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.325244 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data" (OuterVolumeSpecName: "config-data") pod "2db92ff9-020b-4235-b2bb-74ee897afee5" (UID: "2db92ff9-020b-4235-b2bb-74ee897afee5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.392008 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.392310 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.392325 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db92ff9-020b-4235-b2bb-74ee897afee5-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.392338 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5snkv\" (UniqueName: \"kubernetes.io/projected/2db92ff9-020b-4235-b2bb-74ee897afee5-kube-api-access-5snkv\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.818213 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-2jcrb" event={"ID":"2db92ff9-020b-4235-b2bb-74ee897afee5","Type":"ContainerDied","Data":"85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2"} Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.818255 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85dbbd2b0bf278d330b8134019d15b5805a366ff47b6afcb7eaeaea7afa461e2" Nov 25 16:13:08 crc kubenswrapper[4879]: I1125 16:13:08.818302 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-2jcrb" Nov 25 16:13:12 crc kubenswrapper[4879]: I1125 16:13:12.053853 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-jdgpb"] Nov 25 16:13:12 crc kubenswrapper[4879]: I1125 16:13:12.065044 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-jdgpb"] Nov 25 16:13:12 crc kubenswrapper[4879]: I1125 16:13:12.645770 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:12 crc kubenswrapper[4879]: E1125 16:13:12.646281 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.613149 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Nov 25 16:13:13 crc kubenswrapper[4879]: E1125 16:13:13.614530 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db92ff9-020b-4235-b2bb-74ee897afee5" containerName="aodh-db-sync" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.614558 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db92ff9-020b-4235-b2bb-74ee897afee5" containerName="aodh-db-sync" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.614839 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db92ff9-020b-4235-b2bb-74ee897afee5" containerName="aodh-db-sync" Nov 
25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.618495 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.633337 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.637035 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.637462 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.638178 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-fx8cg" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.665480 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ceae1f-405c-440c-b3df-aa6a9289506c" path="/var/lib/kubelet/pods/30ceae1f-405c-440c-b3df-aa6a9289506c/volumes" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.703189 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksfhm\" (UniqueName: \"kubernetes.io/projected/a56d3003-901e-46ce-99d5-86ea06b4915a-kube-api-access-ksfhm\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.703350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-scripts\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.703455 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-config-data\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.703482 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.805770 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-config-data\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.805820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.805916 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksfhm\" (UniqueName: \"kubernetes.io/projected/a56d3003-901e-46ce-99d5-86ea06b4915a-kube-api-access-ksfhm\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 
16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.805982 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-scripts\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.811737 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-scripts\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.811872 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-config-data\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.814217 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56d3003-901e-46ce-99d5-86ea06b4915a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.823867 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksfhm\" (UniqueName: \"kubernetes.io/projected/a56d3003-901e-46ce-99d5-86ea06b4915a-kube-api-access-ksfhm\") pod \"aodh-0\" (UID: \"a56d3003-901e-46ce-99d5-86ea06b4915a\") " pod="openstack/aodh-0" Nov 25 16:13:13 crc kubenswrapper[4879]: I1125 16:13:13.953414 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Nov 25 16:13:14 crc kubenswrapper[4879]: I1125 16:13:14.468665 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 25 16:13:14 crc kubenswrapper[4879]: I1125 16:13:14.883163 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"a56d3003-901e-46ce-99d5-86ea06b4915a","Type":"ContainerStarted","Data":"cf5b177e2277a1fdfd97d4091eb5a1f9fdbf668761c30fb8fb1769dc33eb6d05"} Nov 25 16:13:16 crc kubenswrapper[4879]: I1125 16:13:16.122505 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:16 crc kubenswrapper[4879]: I1125 16:13:16.123265 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-central-agent" containerID="cri-o://fb36307d1491d507403b26aac7162a327162e3c927f92f71f034e398e8f990fb" gracePeriod=30 Nov 25 16:13:16 crc kubenswrapper[4879]: I1125 16:13:16.123320 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="proxy-httpd" containerID="cri-o://21e9053abbb916dfd13d0773e94e026ee4464ef327c75dace8bb53e349e763c1" gracePeriod=30 Nov 25 16:13:16 crc kubenswrapper[4879]: I1125 16:13:16.123609 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-notification-agent" containerID="cri-o://c7a46e814c3ca8f5f600ad8e39f09ee703879e797fd7f9ea3131b963927979b2" gracePeriod=30 Nov 25 16:13:16 crc kubenswrapper[4879]: I1125 16:13:16.125046 4879 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="sg-core" containerID="cri-o://2b213da7a25f122876484f19b9645b043862bac9d25b1bd3b796c694a319bd95" gracePeriod=30
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.085710 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"a56d3003-901e-46ce-99d5-86ea06b4915a","Type":"ContainerStarted","Data":"cad22208e599cd7c3df7051113ef7ab131dbfafd77c6ba8025e7f481d40f8710"}
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089494 4879 generic.go:334] "Generic (PLEG): container finished" podID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerID="21e9053abbb916dfd13d0773e94e026ee4464ef327c75dace8bb53e349e763c1" exitCode=0
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089526 4879 generic.go:334] "Generic (PLEG): container finished" podID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerID="2b213da7a25f122876484f19b9645b043862bac9d25b1bd3b796c694a319bd95" exitCode=2
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089539 4879 generic.go:334] "Generic (PLEG): container finished" podID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerID="fb36307d1491d507403b26aac7162a327162e3c927f92f71f034e398e8f990fb" exitCode=0
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089561 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerDied","Data":"21e9053abbb916dfd13d0773e94e026ee4464ef327c75dace8bb53e349e763c1"}
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089582 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerDied","Data":"2b213da7a25f122876484f19b9645b043862bac9d25b1bd3b796c694a319bd95"}
Nov 25 16:13:17 crc kubenswrapper[4879]: I1125 16:13:17.089594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerDied","Data":"fb36307d1491d507403b26aac7162a327162e3c927f92f71f034e398e8f990fb"}
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.104605 4879 generic.go:334] "Generic (PLEG): container finished" podID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerID="c7a46e814c3ca8f5f600ad8e39f09ee703879e797fd7f9ea3131b963927979b2" exitCode=0
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.104936 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerDied","Data":"c7a46e814c3ca8f5f600ad8e39f09ee703879e797fd7f9ea3131b963927979b2"}
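The "Killing container with a grace period ... gracePeriod=30" entries above, followed by the "container finished" events, show the old ceilometer-0 containers being stopped for pod replacement: exitCode=0 (proxy-httpd, ceilometer-central-agent, ceilometer-notification-agent) indicates a clean shutdown on SIGTERM, while sg-core's exitCode=2 suggests it exited with an error rather than handling the signal cleanly. The Go sketch below illustrates the general stop sequence (signal, wait out the grace period, then force-kill). It is an assumption-laden illustration only, not kubelet source: kubelet delegates the actual kill to the CRI runtime (CRI-O here), and stopWithGrace and the sleep command are invented for the example.

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace mirrors the sequence behind "Killing container with a grace
// period": ask the process to stop, give it the grace period (30s above) to
// exit on its own, then force-kill it. Illustrative sketch, not kubelet code.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite shutdown request
	select {
	case err := <-done: // exited within the grace period (exitCode as logged)
		return err
	case <-time.After(grace): // grace period expired
		_ = cmd.Process.Kill() // SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60") // stand-in for a container process
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	_ = stopWithGrace(cmd, 30*time.Second)
}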
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.397455 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.499733 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.499770 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.499882 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79smx\" (UniqueName: \"kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.499923 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.499999 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.500082 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.500160 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd\") pod \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\" (UID: \"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d\") "
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.500381 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.500932 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "run-httpd".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.501034 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.501050 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.504925 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts" (OuterVolumeSpecName: "scripts") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.507852 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx" (OuterVolumeSpecName: "kube-api-access-79smx") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "kube-api-access-79smx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.534170 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.590235 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.602803 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.603078 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79smx\" (UniqueName: \"kubernetes.io/projected/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-kube-api-access-79smx\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.603206 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.603283 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.630480 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data" (OuterVolumeSpecName: "config-data") pod "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" (UID: "5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:13:18 crc kubenswrapper[4879]: I1125 16:13:18.704928 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.116601 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"a56d3003-901e-46ce-99d5-86ea06b4915a","Type":"ContainerStarted","Data":"9214c1dccdc2b7ddb3599654dcc6fce39557416c0efec49c0ff9a58d6c7dee06"} Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.118846 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d","Type":"ContainerDied","Data":"4d01982106dae23e97897b4f779daa7c3dfa8e2831ee132de9821614eee49930"} Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.118889 4879 scope.go:117] "RemoveContainer" containerID="21e9053abbb916dfd13d0773e94e026ee4464ef327c75dace8bb53e349e763c1" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.119048 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.160763 4879 scope.go:117] "RemoveContainer" containerID="2b213da7a25f122876484f19b9645b043862bac9d25b1bd3b796c694a319bd95" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.175610 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.185052 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.191210 4879 scope.go:117] "RemoveContainer" containerID="c7a46e814c3ca8f5f600ad8e39f09ee703879e797fd7f9ea3131b963927979b2" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.195234 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:19 crc kubenswrapper[4879]: E1125 16:13:19.195868 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-notification-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.195964 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-notification-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: E1125 16:13:19.196057 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="sg-core" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.196160 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="sg-core" Nov 25 16:13:19 crc kubenswrapper[4879]: E1125 16:13:19.196260 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-central-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.196337 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-central-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: E1125 16:13:19.196449 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="proxy-httpd" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.196519 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="proxy-httpd" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.196902 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="proxy-httpd" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.196995 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="sg-core" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.197075 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-central-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.197183 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" containerName="ceilometer-notification-agent" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.199742 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.203253 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.203941 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.208032 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.217708 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.218152 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.218508 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8c5f\" (UniqueName: \"kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.217905 4879 scope.go:117] "RemoveContainer" containerID="fb36307d1491d507403b26aac7162a327162e3c927f92f71f034e398e8f990fb" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.221745 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.221840 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.221892 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.221921 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.323829 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.323920 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.324026 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.324077 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.324145 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8c5f\" (UniqueName: \"kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.324301 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.324347 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.326148 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.328401 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.330651 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.332362 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.334089 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.345337 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8c5f\" (UniqueName: \"kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.354085 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts\") pod \"ceilometer-0\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.516936 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.674629 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d" path="/var/lib/kubelet/pods/5a6d7ce3-9e3a-4bb3-8540-83fa67fe180d/volumes" Nov 25 16:13:19 crc kubenswrapper[4879]: I1125 16:13:19.967913 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:13:20 crc kubenswrapper[4879]: I1125 16:13:20.129738 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerStarted","Data":"2705f8ca6d3b5740c9d21eb83cead4fb8d1bb145c5176b336e6e488c66934585"} Nov 25 16:13:22 crc kubenswrapper[4879]: I1125 16:13:22.154924 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerStarted","Data":"e811a20564337bd82d8eef1d678e4fc4b9172f9e42bf2952461588a71642f05c"} Nov 25 16:13:22 crc kubenswrapper[4879]: I1125 16:13:22.158555 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"a56d3003-901e-46ce-99d5-86ea06b4915a","Type":"ContainerStarted","Data":"90ad5260550c18ba87897b8a01763549b276bab605fe72b6a3418e73480c6be8"} Nov 25 16:13:23 crc kubenswrapper[4879]: I1125 16:13:23.168954 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerStarted","Data":"6b19a8760a329881c5aa4b510981c82a2184332eda22a1cfd5c982314e730b09"} Nov 25 16:13:23 crc kubenswrapper[4879]: I1125 16:13:23.645101 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:23 crc kubenswrapper[4879]: E1125 16:13:23.645387 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:13:27 crc kubenswrapper[4879]: I1125 16:13:27.210703 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"a56d3003-901e-46ce-99d5-86ea06b4915a","Type":"ContainerStarted","Data":"542167687dc3190174c681751da06e5c49d62f18974aedeefcc2a7031e8285e9"} Nov 25 16:13:27 crc kubenswrapper[4879]: I1125 16:13:27.214566 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerStarted","Data":"f290a3cd99925de521227dec7aeac2984e5d1136368ca917b164ecbe9ff4ccc2"} Nov 25 16:13:27 crc kubenswrapper[4879]: I1125 16:13:27.263850 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.187001984 podStartE2EDuration="14.263828764s" podCreationTimestamp="2025-11-25 16:13:13 +0000 UTC" firstStartedPulling="2025-11-25 16:13:14.483453795 +0000 UTC m=+6486.086866876" lastFinishedPulling="2025-11-25 16:13:26.560280595 +0000 UTC m=+6498.163693656" observedRunningTime="2025-11-25 16:13:27.23697641 +0000 UTC m=+6498.840389481" watchObservedRunningTime="2025-11-25 16:13:27.263828764 +0000 UTC m=+6498.867241825" Nov 25 16:13:30 crc kubenswrapper[4879]: I1125 16:13:30.249152 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerStarted","Data":"4bd61a383e586d644bcd9e2fb1600b3300bada1577fbc49802da2881fd0352b5"} Nov 25 16:13:30 crc kubenswrapper[4879]: I1125 16:13:30.249690 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 16:13:30 crc kubenswrapper[4879]: I1125 16:13:30.272931 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.435865395 podStartE2EDuration="11.272912862s" podCreationTimestamp="2025-11-25 16:13:19 +0000 UTC" firstStartedPulling="2025-11-25 16:13:19.970291466 +0000 UTC m=+6491.573704537" lastFinishedPulling="2025-11-25 16:13:29.807338933 +0000 UTC m=+6501.410752004" observedRunningTime="2025-11-25 16:13:30.267310274 +0000 UTC m=+6501.870723345" watchObservedRunningTime="2025-11-25 16:13:30.272912862 +0000 UTC m=+6501.876325933" Nov 25 16:13:35 crc kubenswrapper[4879]: I1125 16:13:35.644878 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:35 crc kubenswrapper[4879]: E1125 16:13:35.645804 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.837384 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-f49lf"] Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.840048 4879 util.go:30] "No sandbox for pod can be found. 
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.840048 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.853027 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-f49lf"]
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.912052 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.912227 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h4p7\" (UniqueName: \"kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.929923 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-e675-account-create-r7z7g"]
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.931352 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-e675-account-create-r7z7g"
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.933400 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret"
Nov 25 16:13:36 crc kubenswrapper[4879]: I1125 16:13:36.947809 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-e675-account-create-r7z7g"]
Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.013912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h4p7\" (UniqueName: \"kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.014772 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.015665 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.041810 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h4p7\" (UniqueName: \"kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7\") pod \"manila-db-create-f49lf\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " pod="openstack/manila-db-create-f49lf"
Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.117114 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq7l7\" (UniqueName: \"kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7\") pod
\"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.117488 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts\") pod \"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.170790 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f49lf" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.219906 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq7l7\" (UniqueName: \"kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7\") pod \"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.219996 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts\") pod \"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.221027 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts\") pod \"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.238829 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq7l7\" (UniqueName: \"kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7\") pod \"manila-e675-account-create-r7z7g\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.250618 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:37 crc kubenswrapper[4879]: W1125 16:13:37.695018 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a89a828_7c8f_4ae4_a046_9e81fbb4969d.slice/crio-48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d WatchSource:0}: Error finding container 48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d: Status 404 returned error can't find the container with id 48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.700311 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-f49lf"] Nov 25 16:13:37 crc kubenswrapper[4879]: W1125 16:13:37.801090 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb021d6a6_a7d7_4f27_9f8e_47df75f82eed.slice/crio-a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f WatchSource:0}: Error finding container a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f: Status 404 returned error can't find the container with id a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f Nov 25 16:13:37 crc kubenswrapper[4879]: I1125 16:13:37.801877 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-e675-account-create-r7z7g"] Nov 25 16:13:38 crc kubenswrapper[4879]: I1125 16:13:38.363641 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f49lf" event={"ID":"0a89a828-7c8f-4ae4-a046-9e81fbb4969d","Type":"ContainerStarted","Data":"358d30d407d2fe5ffe23ae886d1f16c41e43148584cbffeed10cf1141fa1592c"} Nov 25 16:13:38 crc kubenswrapper[4879]: I1125 16:13:38.363732 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f49lf" event={"ID":"0a89a828-7c8f-4ae4-a046-9e81fbb4969d","Type":"ContainerStarted","Data":"48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d"} Nov 25 16:13:38 crc kubenswrapper[4879]: I1125 16:13:38.365196 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-e675-account-create-r7z7g" event={"ID":"b021d6a6-a7d7-4f27-9f8e-47df75f82eed","Type":"ContainerStarted","Data":"a007105d78cfd14b11e8f4ec9ccd9302dd455140290b018c012a3caf49ccb3a2"} Nov 25 16:13:38 crc kubenswrapper[4879]: I1125 16:13:38.365240 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-e675-account-create-r7z7g" event={"ID":"b021d6a6-a7d7-4f27-9f8e-47df75f82eed","Type":"ContainerStarted","Data":"a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f"} Nov 25 16:13:39 crc kubenswrapper[4879]: I1125 16:13:39.391348 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-e675-account-create-r7z7g" podStartSLOduration=3.391327576 podStartE2EDuration="3.391327576s" podCreationTimestamp="2025-11-25 16:13:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:13:39.388793025 +0000 UTC m=+6510.992206096" watchObservedRunningTime="2025-11-25 16:13:39.391327576 +0000 UTC m=+6510.994740647" Nov 25 16:13:39 crc kubenswrapper[4879]: I1125 16:13:39.413202 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-f49lf" podStartSLOduration=3.413182699 podStartE2EDuration="3.413182699s" 
podCreationTimestamp="2025-11-25 16:13:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:13:39.401570093 +0000 UTC m=+6511.004983164" watchObservedRunningTime="2025-11-25 16:13:39.413182699 +0000 UTC m=+6511.016595770" Nov 25 16:13:40 crc kubenswrapper[4879]: I1125 16:13:40.385535 4879 generic.go:334] "Generic (PLEG): container finished" podID="0a89a828-7c8f-4ae4-a046-9e81fbb4969d" containerID="358d30d407d2fe5ffe23ae886d1f16c41e43148584cbffeed10cf1141fa1592c" exitCode=0 Nov 25 16:13:40 crc kubenswrapper[4879]: I1125 16:13:40.385745 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f49lf" event={"ID":"0a89a828-7c8f-4ae4-a046-9e81fbb4969d","Type":"ContainerDied","Data":"358d30d407d2fe5ffe23ae886d1f16c41e43148584cbffeed10cf1141fa1592c"} Nov 25 16:13:40 crc kubenswrapper[4879]: I1125 16:13:40.387839 4879 generic.go:334] "Generic (PLEG): container finished" podID="b021d6a6-a7d7-4f27-9f8e-47df75f82eed" containerID="a007105d78cfd14b11e8f4ec9ccd9302dd455140290b018c012a3caf49ccb3a2" exitCode=0 Nov 25 16:13:40 crc kubenswrapper[4879]: I1125 16:13:40.387874 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-e675-account-create-r7z7g" event={"ID":"b021d6a6-a7d7-4f27-9f8e-47df75f82eed","Type":"ContainerDied","Data":"a007105d78cfd14b11e8f4ec9ccd9302dd455140290b018c012a3caf49ccb3a2"} Nov 25 16:13:41 crc kubenswrapper[4879]: I1125 16:13:41.871550 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:41 crc kubenswrapper[4879]: I1125 16:13:41.872448 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-f49lf" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.022282 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5h4p7\" (UniqueName: \"kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7\") pod \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.022354 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts\") pod \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.022500 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts\") pod \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\" (UID: \"0a89a828-7c8f-4ae4-a046-9e81fbb4969d\") " Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.022552 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq7l7\" (UniqueName: \"kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7\") pod \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\" (UID: \"b021d6a6-a7d7-4f27-9f8e-47df75f82eed\") " Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.023373 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"0a89a828-7c8f-4ae4-a046-9e81fbb4969d" (UID: "0a89a828-7c8f-4ae4-a046-9e81fbb4969d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.023434 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b021d6a6-a7d7-4f27-9f8e-47df75f82eed" (UID: "b021d6a6-a7d7-4f27-9f8e-47df75f82eed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.023930 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.023974 4879 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.031708 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7" (OuterVolumeSpecName: "kube-api-access-5h4p7") pod "0a89a828-7c8f-4ae4-a046-9e81fbb4969d" (UID: "0a89a828-7c8f-4ae4-a046-9e81fbb4969d"). InnerVolumeSpecName "kube-api-access-5h4p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.032031 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7" (OuterVolumeSpecName: "kube-api-access-jq7l7") pod "b021d6a6-a7d7-4f27-9f8e-47df75f82eed" (UID: "b021d6a6-a7d7-4f27-9f8e-47df75f82eed"). InnerVolumeSpecName "kube-api-access-jq7l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.125723 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5h4p7\" (UniqueName: \"kubernetes.io/projected/0a89a828-7c8f-4ae4-a046-9e81fbb4969d-kube-api-access-5h4p7\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.125763 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq7l7\" (UniqueName: \"kubernetes.io/projected/b021d6a6-a7d7-4f27-9f8e-47df75f82eed-kube-api-access-jq7l7\") on node \"crc\" DevicePath \"\"" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.406730 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-f49lf" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.406706 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-f49lf" event={"ID":"0a89a828-7c8f-4ae4-a046-9e81fbb4969d","Type":"ContainerDied","Data":"48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d"} Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.406794 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48ed21803ea9d68c682bd00308774b3b65799a333da21d76af8fa90429fd7d9d" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.408283 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-e675-account-create-r7z7g" event={"ID":"b021d6a6-a7d7-4f27-9f8e-47df75f82eed","Type":"ContainerDied","Data":"a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f"} Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.408319 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-e675-account-create-r7z7g" Nov 25 16:13:42 crc kubenswrapper[4879]: I1125 16:13:42.408323 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4ac41541f41cb2c078f9e92611623f8fb718ef305e4705547d06d793eaee38f" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.303151 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-6q5vt"] Nov 25 16:13:47 crc kubenswrapper[4879]: E1125 16:13:47.304281 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a89a828-7c8f-4ae4-a046-9e81fbb4969d" containerName="mariadb-database-create" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.304301 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a89a828-7c8f-4ae4-a046-9e81fbb4969d" containerName="mariadb-database-create" Nov 25 16:13:47 crc kubenswrapper[4879]: E1125 16:13:47.304346 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b021d6a6-a7d7-4f27-9f8e-47df75f82eed" containerName="mariadb-account-create" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.304355 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b021d6a6-a7d7-4f27-9f8e-47df75f82eed" containerName="mariadb-account-create" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.304678 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b021d6a6-a7d7-4f27-9f8e-47df75f82eed" containerName="mariadb-account-create" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.304699 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a89a828-7c8f-4ae4-a046-9e81fbb4969d" containerName="mariadb-database-create" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.305695 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.308136 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.308590 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-7xznl" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.314678 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6q5vt"] Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.433918 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.433979 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlht6\" (UniqueName: \"kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.434076 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.434196 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.535886 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.536000 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.536037 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlht6\" (UniqueName: \"kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.536103 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data\") pod \"manila-db-sync-6q5vt\" (UID: 
\"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.543356 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.545905 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.546838 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.557349 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlht6\" (UniqueName: \"kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6\") pod \"manila-db-sync-6q5vt\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.635691 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6q5vt" Nov 25 16:13:47 crc kubenswrapper[4879]: I1125 16:13:47.644834 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:47 crc kubenswrapper[4879]: E1125 16:13:47.645390 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:13:48 crc kubenswrapper[4879]: I1125 16:13:48.220794 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6q5vt"] Nov 25 16:13:48 crc kubenswrapper[4879]: I1125 16:13:48.477588 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6q5vt" event={"ID":"fc9955de-2773-439c-8ae3-0250704dbd65","Type":"ContainerStarted","Data":"a48954e0b61c8beb9e742b1215aa9ae3b0d6abf10a115be18f2535d2ed42446a"} Nov 25 16:13:49 crc kubenswrapper[4879]: I1125 16:13:49.526267 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 16:13:53 crc kubenswrapper[4879]: I1125 16:13:53.054070 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-7vdtq"] Nov 25 16:13:53 crc kubenswrapper[4879]: I1125 16:13:53.066414 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-7vdtq"] Nov 25 16:13:53 crc kubenswrapper[4879]: I1125 16:13:53.676799 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db08830b-81f9-4ec0-a944-2ce3d14283ab" 
path="/var/lib/kubelet/pods/db08830b-81f9-4ec0-a944-2ce3d14283ab/volumes" Nov 25 16:13:53 crc kubenswrapper[4879]: I1125 16:13:53.943885 4879 scope.go:117] "RemoveContainer" containerID="e6f06990b782993a96b7d5ee073acf7985edfa0a52a06bcc03da16e98454835c" Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.043985 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-acee-account-create-nrwzm"] Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.056314 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-acee-account-create-nrwzm"] Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.145358 4879 scope.go:117] "RemoveContainer" containerID="ab7f8ba18c86ea1eef8301e95c54db547483da2025edc51f07d1590860e1cbd3" Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.173378 4879 scope.go:117] "RemoveContainer" containerID="7df753619ee1f46997dc5833356bdbc9731fd5318a77c9fecfa45c047318a824" Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.339890 4879 scope.go:117] "RemoveContainer" containerID="e799b8b8e7655f8eece1ac632a23c39558a9b1a65eb5b90dc76c482d9b5d8c26" Nov 25 16:13:54 crc kubenswrapper[4879]: I1125 16:13:54.412220 4879 scope.go:117] "RemoveContainer" containerID="a98bf795693383c6b96bbbf33d40aafd54704a87d2152f5770b978eb6e4420c0" Nov 25 16:13:55 crc kubenswrapper[4879]: I1125 16:13:55.565068 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6q5vt" event={"ID":"fc9955de-2773-439c-8ae3-0250704dbd65","Type":"ContainerStarted","Data":"d3af482652567946c1d31642af7b04f54151e67dc1b90343e45eaeebaa4091c5"} Nov 25 16:13:55 crc kubenswrapper[4879]: I1125 16:13:55.582206 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-6q5vt" podStartSLOduration=2.641357983 podStartE2EDuration="8.582189968s" podCreationTimestamp="2025-11-25 16:13:47 +0000 UTC" firstStartedPulling="2025-11-25 16:13:48.234776 +0000 UTC m=+6519.838189071" lastFinishedPulling="2025-11-25 16:13:54.175607985 +0000 UTC m=+6525.779021056" observedRunningTime="2025-11-25 16:13:55.579971956 +0000 UTC m=+6527.183385037" watchObservedRunningTime="2025-11-25 16:13:55.582189968 +0000 UTC m=+6527.185603049" Nov 25 16:13:55 crc kubenswrapper[4879]: I1125 16:13:55.660876 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4cd190-d5bf-4367-ad51-61a1e6a873f4" path="/var/lib/kubelet/pods/cf4cd190-d5bf-4367-ad51-61a1e6a873f4/volumes" Nov 25 16:13:59 crc kubenswrapper[4879]: I1125 16:13:59.653398 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:13:59 crc kubenswrapper[4879]: E1125 16:13:59.654145 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.491598 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.495632 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.502454 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.640434 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5x4v\" (UniqueName: \"kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.640629 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.640733 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.742683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.742769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.742820 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5x4v\" (UniqueName: \"kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.743615 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.743809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.763710 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s5x4v\" (UniqueName: \"kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v\") pod \"community-operators-58tx7\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:00 crc kubenswrapper[4879]: I1125 16:14:00.815184 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:01 crc kubenswrapper[4879]: W1125 16:14:01.277724 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6873e0d_214a_4c7a_8a23_e91745ff04f1.slice/crio-c5767a383863f47e3922f86739ed1d6a56092c216a76a432e01a8d34d124a48b WatchSource:0}: Error finding container c5767a383863f47e3922f86739ed1d6a56092c216a76a432e01a8d34d124a48b: Status 404 returned error can't find the container with id c5767a383863f47e3922f86739ed1d6a56092c216a76a432e01a8d34d124a48b Nov 25 16:14:01 crc kubenswrapper[4879]: I1125 16:14:01.282775 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:14:01 crc kubenswrapper[4879]: I1125 16:14:01.640031 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerStarted","Data":"c5767a383863f47e3922f86739ed1d6a56092c216a76a432e01a8d34d124a48b"} Nov 25 16:14:02 crc kubenswrapper[4879]: I1125 16:14:02.651247 4879 generic.go:334] "Generic (PLEG): container finished" podID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerID="db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f" exitCode=0 Nov 25 16:14:02 crc kubenswrapper[4879]: I1125 16:14:02.651450 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerDied","Data":"db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f"} Nov 25 16:14:03 crc kubenswrapper[4879]: I1125 16:14:03.665622 4879 generic.go:334] "Generic (PLEG): container finished" podID="fc9955de-2773-439c-8ae3-0250704dbd65" containerID="d3af482652567946c1d31642af7b04f54151e67dc1b90343e45eaeebaa4091c5" exitCode=0 Nov 25 16:14:03 crc kubenswrapper[4879]: I1125 16:14:03.665724 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6q5vt" event={"ID":"fc9955de-2773-439c-8ae3-0250704dbd65","Type":"ContainerDied","Data":"d3af482652567946c1d31642af7b04f54151e67dc1b90343e45eaeebaa4091c5"} Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.037768 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-hxldl"] Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.050818 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-hxldl"] Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.677744 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerStarted","Data":"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a"} Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.867329 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.870445 4879 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.880033 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.931317 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.931382 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:04 crc kubenswrapper[4879]: I1125 16:14:04.931474 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6wv9\" (UniqueName: \"kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.033979 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.034037 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.034079 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6wv9\" (UniqueName: \"kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.034877 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.034975 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.075545 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6wv9\" (UniqueName: \"kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9\") pod \"redhat-marketplace-7x9sj\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.196446 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.321511 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6q5vt" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.444471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle\") pod \"fc9955de-2773-439c-8ae3-0250704dbd65\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.444724 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data\") pod \"fc9955de-2773-439c-8ae3-0250704dbd65\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.444848 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data\") pod \"fc9955de-2773-439c-8ae3-0250704dbd65\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.444996 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlht6\" (UniqueName: \"kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6\") pod \"fc9955de-2773-439c-8ae3-0250704dbd65\" (UID: \"fc9955de-2773-439c-8ae3-0250704dbd65\") " Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.450017 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "fc9955de-2773-439c-8ae3-0250704dbd65" (UID: "fc9955de-2773-439c-8ae3-0250704dbd65"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.452551 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6" (OuterVolumeSpecName: "kube-api-access-mlht6") pod "fc9955de-2773-439c-8ae3-0250704dbd65" (UID: "fc9955de-2773-439c-8ae3-0250704dbd65"). InnerVolumeSpecName "kube-api-access-mlht6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.467359 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data" (OuterVolumeSpecName: "config-data") pod "fc9955de-2773-439c-8ae3-0250704dbd65" (UID: "fc9955de-2773-439c-8ae3-0250704dbd65"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.493687 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc9955de-2773-439c-8ae3-0250704dbd65" (UID: "fc9955de-2773-439c-8ae3-0250704dbd65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.547487 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.547524 4879 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-job-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.547540 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlht6\" (UniqueName: \"kubernetes.io/projected/fc9955de-2773-439c-8ae3-0250704dbd65-kube-api-access-mlht6\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.547553 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9955de-2773-439c-8ae3-0250704dbd65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.661590 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b459cfc1-d98a-40f9-b99e-8a9c71cadf08" path="/var/lib/kubelet/pods/b459cfc1-d98a-40f9-b99e-8a9c71cadf08/volumes" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.662893 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:05 crc kubenswrapper[4879]: W1125 16:14:05.666518 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod142b3996_a07f_4236_86df_adcff69ce6a4.slice/crio-f2797087672869f664fa71ab92ed1a428212fb8e6c422e8d58e54119ed255ef1 WatchSource:0}: Error finding container f2797087672869f664fa71ab92ed1a428212fb8e6c422e8d58e54119ed255ef1: Status 404 returned error can't find the container with id f2797087672869f664fa71ab92ed1a428212fb8e6c422e8d58e54119ed255ef1 Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.695941 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6q5vt" event={"ID":"fc9955de-2773-439c-8ae3-0250704dbd65","Type":"ContainerDied","Data":"a48954e0b61c8beb9e742b1215aa9ae3b0d6abf10a115be18f2535d2ed42446a"} Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.696009 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a48954e0b61c8beb9e742b1215aa9ae3b0d6abf10a115be18f2535d2ed42446a" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.695963 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-6q5vt" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.703196 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerStarted","Data":"f2797087672869f664fa71ab92ed1a428212fb8e6c422e8d58e54119ed255ef1"} Nov 25 16:14:05 crc kubenswrapper[4879]: E1125 16:14:05.765350 4879 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc9955de_2773_439c_8ae3_0250704dbd65.slice/crio-a48954e0b61c8beb9e742b1215aa9ae3b0d6abf10a115be18f2535d2ed42446a\": RecentStats: unable to find data in memory cache]" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.912431 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 25 16:14:05 crc kubenswrapper[4879]: E1125 16:14:05.913033 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9955de-2773-439c-8ae3-0250704dbd65" containerName="manila-db-sync" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.913056 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9955de-2773-439c-8ae3-0250704dbd65" containerName="manila-db-sync" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.913345 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc9955de-2773-439c-8ae3-0250704dbd65" containerName="manila-db-sync" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.914842 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.927090 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-7xznl" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.927364 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.927537 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.927768 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Nov 25 16:14:05 crc kubenswrapper[4879]: I1125 16:14:05.933226 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.020459 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.028649 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.032362 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.041796 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.067722 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.067791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.067856 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.067963 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-scripts\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.068134 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.068356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jwvs\" (UniqueName: \"kubernetes.io/projected/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-kube-api-access-7jwvs\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.119179 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.121081 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.130377 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.170991 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171301 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171417 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h78d7\" (UniqueName: \"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-kube-api-access-h78d7\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171550 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-scripts\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171662 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171782 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-scripts\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.171924 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.172027 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173245 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data\") pod \"manila-share-share1-0\" (UID: 
\"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173352 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173439 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-ceph\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173538 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173617 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jwvs\" (UniqueName: \"kubernetes.io/projected/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-kube-api-access-7jwvs\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.173666 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.174450 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.179901 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.180443 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.192592 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-scripts\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.199708 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.202926 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jwvs\" (UniqueName: \"kubernetes.io/projected/b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc-kube-api-access-7jwvs\") pod \"manila-scheduler-0\" (UID: \"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc\") " pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275278 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h78d7\" (UniqueName: \"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-kube-api-access-h78d7\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275531 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-scripts\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275624 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275718 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msb5k\" (UniqueName: \"kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275814 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275889 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.275969 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276070 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-ceph\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276176 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276257 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276342 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.276535 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.277234 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.277430 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9b5edbf0-5410-4506-93b1-472e241e68c0-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.287875 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.290305 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-ceph\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.290800 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.304098 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.324733 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.328267 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b5edbf0-5410-4506-93b1-472e241e68c0-scripts\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.328343 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.330383 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.334536 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.334775 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h78d7\" (UniqueName: \"kubernetes.io/projected/9b5edbf0-5410-4506-93b1-472e241e68c0-kube-api-access-h78d7\") pod \"manila-share-share1-0\" (UID: \"9b5edbf0-5410-4506-93b1-472e241e68c0\") " pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.345454 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.377974 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.378066 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.378166 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.378226 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.378267 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msb5k\" (UniqueName: \"kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.378933 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.379678 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.380329 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.380445 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.400223 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msb5k\" (UniqueName: \"kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k\") pod \"dnsmasq-dns-547577b957-5mvx2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.407371 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.448563 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.480246 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwjns\" (UniqueName: \"kubernetes.io/projected/95f83580-e814-453a-9123-0490abae84f2-kube-api-access-wwjns\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.480293 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.481491 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95f83580-e814-453a-9123-0490abae84f2-logs\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.481517 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/95f83580-e814-453a-9123-0490abae84f2-etc-machine-id\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.481550 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data-custom\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.481575 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-scripts\") pod \"manila-api-0\" (UID: 
\"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.481637 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583616 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwjns\" (UniqueName: \"kubernetes.io/projected/95f83580-e814-453a-9123-0490abae84f2-kube-api-access-wwjns\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583675 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583758 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95f83580-e814-453a-9123-0490abae84f2-logs\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583791 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/95f83580-e814-453a-9123-0490abae84f2-etc-machine-id\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583846 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data-custom\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583893 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-scripts\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.583993 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.584261 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/95f83580-e814-453a-9123-0490abae84f2-etc-machine-id\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.585212 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95f83580-e814-453a-9123-0490abae84f2-logs\") pod \"manila-api-0\" (UID: 
\"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.589051 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.589088 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data-custom\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.589478 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-scripts\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.589750 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95f83580-e814-453a-9123-0490abae84f2-config-data\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.620534 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwjns\" (UniqueName: \"kubernetes.io/projected/95f83580-e814-453a-9123-0490abae84f2-kube-api-access-wwjns\") pod \"manila-api-0\" (UID: \"95f83580-e814-453a-9123-0490abae84f2\") " pod="openstack/manila-api-0" Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.721325 4879 generic.go:334] "Generic (PLEG): container finished" podID="142b3996-a07f-4236-86df-adcff69ce6a4" containerID="7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601" exitCode=0 Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.721376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerDied","Data":"7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601"} Nov 25 16:14:06 crc kubenswrapper[4879]: I1125 16:14:06.832083 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Nov 25 16:14:07 crc kubenswrapper[4879]: I1125 16:14:07.252965 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:14:07 crc kubenswrapper[4879]: I1125 16:14:07.384988 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 25 16:14:07 crc kubenswrapper[4879]: I1125 16:14:07.733011 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547577b957-5mvx2" event={"ID":"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2","Type":"ContainerStarted","Data":"90f51d7443d7dec8fb5b8e137fbbcf2fbf5f1a4527602da74c3fc7f089ec1ae4"} Nov 25 16:14:07 crc kubenswrapper[4879]: I1125 16:14:07.734720 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc","Type":"ContainerStarted","Data":"44bfc17d9734d99d3fc9eced0d485e9de3399bfd412267c7e91c93ba1a2306bd"} Nov 25 16:14:08 crc kubenswrapper[4879]: I1125 16:14:08.752002 4879 generic.go:334] "Generic (PLEG): container finished" podID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerID="a9dc63bd8c2c75762eaba481fde100256208f48befc08719e5f2d9d19a8f3ea8" exitCode=0 Nov 25 16:14:08 crc kubenswrapper[4879]: I1125 16:14:08.752074 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547577b957-5mvx2" event={"ID":"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2","Type":"ContainerDied","Data":"a9dc63bd8c2c75762eaba481fde100256208f48befc08719e5f2d9d19a8f3ea8"} Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.082077 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.765156 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.769755 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"95f83580-e814-453a-9123-0490abae84f2","Type":"ContainerStarted","Data":"0c0cae8a223ae36c694f19ac2c07f3aff72f9890c3b3c4df720b756792c84083"} Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.775511 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerStarted","Data":"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2"} Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.786704 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547577b957-5mvx2" event={"ID":"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2","Type":"ContainerStarted","Data":"3c119bc159edb7d6f16d1c67b5b8f49cd25d24e955c260b1b90d9f09501d5d71"} Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.787872 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.799383 4879 generic.go:334] "Generic (PLEG): container finished" podID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerID="6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a" exitCode=0 Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.799466 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" 
event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerDied","Data":"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a"} Nov 25 16:14:09 crc kubenswrapper[4879]: I1125 16:14:09.820422 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-547577b957-5mvx2" podStartSLOduration=3.8204048090000002 podStartE2EDuration="3.820404809s" podCreationTimestamp="2025-11-25 16:14:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:14:09.818432904 +0000 UTC m=+6541.421845965" watchObservedRunningTime="2025-11-25 16:14:09.820404809 +0000 UTC m=+6541.423817880" Nov 25 16:14:10 crc kubenswrapper[4879]: I1125 16:14:10.818540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc","Type":"ContainerStarted","Data":"4a32366ede0c74814d53d1a41ca19b8f888309cc1342ae6cbb3e31322e193175"} Nov 25 16:14:10 crc kubenswrapper[4879]: I1125 16:14:10.821829 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9b5edbf0-5410-4506-93b1-472e241e68c0","Type":"ContainerStarted","Data":"734a3d2b05e29277d19c179d7ef99303060cc6eee717d6caa0efae383b3b68d9"} Nov 25 16:14:10 crc kubenswrapper[4879]: I1125 16:14:10.824910 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"95f83580-e814-453a-9123-0490abae84f2","Type":"ContainerStarted","Data":"aa17c111402b462ed1756d82dc94f5d2805141db5e34934ebb6f31fbf6ece198"} Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.029424 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.038280 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-central-agent" containerID="cri-o://e811a20564337bd82d8eef1d678e4fc4b9172f9e42bf2952461588a71642f05c" gracePeriod=30 Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.038527 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="proxy-httpd" containerID="cri-o://4bd61a383e586d644bcd9e2fb1600b3300bada1577fbc49802da2881fd0352b5" gracePeriod=30 Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.038676 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="sg-core" containerID="cri-o://f290a3cd99925de521227dec7aeac2984e5d1136368ca917b164ecbe9ff4ccc2" gracePeriod=30 Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.038753 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-notification-agent" containerID="cri-o://6b19a8760a329881c5aa4b510981c82a2184332eda22a1cfd5c982314e730b09" gracePeriod=30 Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.645598 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:14:11 crc kubenswrapper[4879]: E1125 16:14:11.646320 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.841487 4879 generic.go:334] "Generic (PLEG): container finished" podID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerID="f290a3cd99925de521227dec7aeac2984e5d1136368ca917b164ecbe9ff4ccc2" exitCode=2 Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.841573 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerDied","Data":"f290a3cd99925de521227dec7aeac2984e5d1136368ca917b164ecbe9ff4ccc2"} Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.845618 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc","Type":"ContainerStarted","Data":"7672a7f69ac6553de29e9358eb43026cc915ba68e3812da5d830bb856d5697f8"} Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.855639 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"95f83580-e814-453a-9123-0490abae84f2","Type":"ContainerStarted","Data":"92c146bb141ef44515051ddbec06a1548111bf01b524c0a2540cb84c3ea0ce6b"} Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.887950 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=5.024326033 podStartE2EDuration="6.887921187s" podCreationTimestamp="2025-11-25 16:14:05 +0000 UTC" firstStartedPulling="2025-11-25 16:14:07.373461171 +0000 UTC m=+6538.976874242" lastFinishedPulling="2025-11-25 16:14:09.237056325 +0000 UTC m=+6540.840469396" observedRunningTime="2025-11-25 16:14:11.874695176 +0000 UTC m=+6543.478108257" watchObservedRunningTime="2025-11-25 16:14:11.887921187 +0000 UTC m=+6543.491334278" Nov 25 16:14:11 crc kubenswrapper[4879]: I1125 16:14:11.935957 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.935940244 podStartE2EDuration="5.935940244s" podCreationTimestamp="2025-11-25 16:14:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:14:11.893848603 +0000 UTC m=+6543.497261674" watchObservedRunningTime="2025-11-25 16:14:11.935940244 +0000 UTC m=+6543.539353315" Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.871485 4879 generic.go:334] "Generic (PLEG): container finished" podID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerID="4bd61a383e586d644bcd9e2fb1600b3300bada1577fbc49802da2881fd0352b5" exitCode=0 Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.871772 4879 generic.go:334] "Generic (PLEG): container finished" podID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerID="e811a20564337bd82d8eef1d678e4fc4b9172f9e42bf2952461588a71642f05c" exitCode=0 Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.871698 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerDied","Data":"4bd61a383e586d644bcd9e2fb1600b3300bada1577fbc49802da2881fd0352b5"} Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.871839 4879 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerDied","Data":"e811a20564337bd82d8eef1d678e4fc4b9172f9e42bf2952461588a71642f05c"} Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.876303 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerStarted","Data":"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b"} Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.877823 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Nov 25 16:14:12 crc kubenswrapper[4879]: I1125 16:14:12.911071 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-58tx7" podStartSLOduration=3.983151752 podStartE2EDuration="12.911051057s" podCreationTimestamp="2025-11-25 16:14:00 +0000 UTC" firstStartedPulling="2025-11-25 16:14:02.653265611 +0000 UTC m=+6534.256678682" lastFinishedPulling="2025-11-25 16:14:11.581164916 +0000 UTC m=+6543.184577987" observedRunningTime="2025-11-25 16:14:12.896109618 +0000 UTC m=+6544.499522709" watchObservedRunningTime="2025-11-25 16:14:12.911051057 +0000 UTC m=+6544.514464128" Nov 25 16:14:15 crc kubenswrapper[4879]: I1125 16:14:15.915916 4879 generic.go:334] "Generic (PLEG): container finished" podID="142b3996-a07f-4236-86df-adcff69ce6a4" containerID="568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2" exitCode=0 Nov 25 16:14:15 crc kubenswrapper[4879]: I1125 16:14:15.916503 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerDied","Data":"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2"} Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.290234 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.450291 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.539817 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.541410 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86f775599f-txqnp" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="dnsmasq-dns" containerID="cri-o://153887253f1dd05ebdb9f88e7098c11b8629d69c0f0e8bb6429cdc354a1b056e" gracePeriod=10 Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.933683 4879 generic.go:334] "Generic (PLEG): container finished" podID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerID="6b19a8760a329881c5aa4b510981c82a2184332eda22a1cfd5c982314e730b09" exitCode=0 Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.933761 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerDied","Data":"6b19a8760a329881c5aa4b510981c82a2184332eda22a1cfd5c982314e730b09"} Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.936728 4879 generic.go:334] "Generic (PLEG): container finished" podID="3a724b1f-c257-4848-b4c5-f63cea4928d9" 
containerID="153887253f1dd05ebdb9f88e7098c11b8629d69c0f0e8bb6429cdc354a1b056e" exitCode=0 Nov 25 16:14:16 crc kubenswrapper[4879]: I1125 16:14:16.936766 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f775599f-txqnp" event={"ID":"3a724b1f-c257-4848-b4c5-f63cea4928d9","Type":"ContainerDied","Data":"153887253f1dd05ebdb9f88e7098c11b8629d69c0f0e8bb6429cdc354a1b056e"} Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.252582 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.315589 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.328929 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329075 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329161 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc\") pod \"3a724b1f-c257-4848-b4c5-f63cea4928d9\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329192 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb\") pod \"3a724b1f-c257-4848-b4c5-f63cea4928d9\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329225 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw8bv\" (UniqueName: \"kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv\") pod \"3a724b1f-c257-4848-b4c5-f63cea4928d9\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329325 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329378 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329417 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config\") pod \"3a724b1f-c257-4848-b4c5-f63cea4928d9\" (UID: 
\"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329513 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8c5f\" (UniqueName: \"kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329558 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329610 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data\") pod \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\" (UID: \"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.329634 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb\") pod \"3a724b1f-c257-4848-b4c5-f63cea4928d9\" (UID: \"3a724b1f-c257-4848-b4c5-f63cea4928d9\") " Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.330234 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.330970 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.331210 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.331230 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.339917 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts" (OuterVolumeSpecName: "scripts") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.347429 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f" (OuterVolumeSpecName: "kube-api-access-w8c5f") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "kube-api-access-w8c5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.380553 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv" (OuterVolumeSpecName: "kube-api-access-rw8bv") pod "3a724b1f-c257-4848-b4c5-f63cea4928d9" (UID: "3a724b1f-c257-4848-b4c5-f63cea4928d9"). InnerVolumeSpecName "kube-api-access-rw8bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.421827 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.435175 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.435360 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.435383 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw8bv\" (UniqueName: \"kubernetes.io/projected/3a724b1f-c257-4848-b4c5-f63cea4928d9-kube-api-access-rw8bv\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.435395 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8c5f\" (UniqueName: \"kubernetes.io/projected/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-kube-api-access-w8c5f\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.451235 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3a724b1f-c257-4848-b4c5-f63cea4928d9" (UID: "3a724b1f-c257-4848-b4c5-f63cea4928d9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.496518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3a724b1f-c257-4848-b4c5-f63cea4928d9" (UID: "3a724b1f-c257-4848-b4c5-f63cea4928d9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.497355 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config" (OuterVolumeSpecName: "config") pod "3a724b1f-c257-4848-b4c5-f63cea4928d9" (UID: "3a724b1f-c257-4848-b4c5-f63cea4928d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.527481 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3a724b1f-c257-4848-b4c5-f63cea4928d9" (UID: "3a724b1f-c257-4848-b4c5-f63cea4928d9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.537309 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.537348 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.537361 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.537375 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3a724b1f-c257-4848-b4c5-f63cea4928d9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.561064 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.617363 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data" (OuterVolumeSpecName: "config-data") pod "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" (UID: "ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.639873 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.639910 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.986812 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.986854 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c","Type":"ContainerDied","Data":"2705f8ca6d3b5740c9d21eb83cead4fb8d1bb145c5176b336e6e488c66934585"} Nov 25 16:14:19 crc kubenswrapper[4879]: I1125 16:14:19.987322 4879 scope.go:117] "RemoveContainer" containerID="4bd61a383e586d644bcd9e2fb1600b3300bada1577fbc49802da2881fd0352b5" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.000388 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86f775599f-txqnp" event={"ID":"3a724b1f-c257-4848-b4c5-f63cea4928d9","Type":"ContainerDied","Data":"52fa05bd2264ebdfb9a583236c981fae9e6103bb240b7fbfcee82b0b6a8de6ad"} Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.000515 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86f775599f-txqnp" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.020927 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9b5edbf0-5410-4506-93b1-472e241e68c0","Type":"ContainerStarted","Data":"e08fe5cfc1f62d6c83a58c31eb565aa2e04f0c66ff05a857c779a335737d3236"} Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.025802 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.027017 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerStarted","Data":"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b"} Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.048663 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.069454 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.086450 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86f775599f-txqnp"] Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.096155 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097702 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-notification-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.097726 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-notification-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097778 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="dnsmasq-dns" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.097786 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="dnsmasq-dns" Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097808 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="proxy-httpd" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.097814 4879 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="proxy-httpd" Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097837 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="sg-core" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.097863 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="sg-core" Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097890 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-central-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.097901 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-central-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: E1125 16:14:20.097957 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="init" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098000 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="init" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098608 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="sg-core" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098854 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-central-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098881 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" containerName="dnsmasq-dns" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098896 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="proxy-httpd" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.098919 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" containerName="ceilometer-notification-agent" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.099674 4879 scope.go:117] "RemoveContainer" containerID="f290a3cd99925de521227dec7aeac2984e5d1136368ca917b164ecbe9ff4ccc2" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.102785 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.109867 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.110168 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.124294 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.127289 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7x9sj" podStartSLOduration=3.4532387509999998 podStartE2EDuration="16.127268794s" podCreationTimestamp="2025-11-25 16:14:04 +0000 UTC" firstStartedPulling="2025-11-25 16:14:06.724419112 +0000 UTC m=+6538.327832183" lastFinishedPulling="2025-11-25 16:14:19.398449145 +0000 UTC m=+6551.001862226" observedRunningTime="2025-11-25 16:14:20.086415448 +0000 UTC m=+6551.689828519" watchObservedRunningTime="2025-11-25 16:14:20.127268794 +0000 UTC m=+6551.730681865" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.182995 4879 scope.go:117] "RemoveContainer" containerID="6b19a8760a329881c5aa4b510981c82a2184332eda22a1cfd5c982314e730b09" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.262813 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.262919 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.263223 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.263323 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvpj4\" (UniqueName: \"kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.263344 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.263435 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.264807 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.289476 4879 scope.go:117] "RemoveContainer" containerID="e811a20564337bd82d8eef1d678e4fc4b9172f9e42bf2952461588a71642f05c" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.340438 4879 scope.go:117] "RemoveContainer" containerID="153887253f1dd05ebdb9f88e7098c11b8629d69c0f0e8bb6429cdc354a1b056e" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367282 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367405 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367522 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvpj4\" (UniqueName: \"kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367550 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367622 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367660 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367689 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.367888 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd\") pod \"ceilometer-0\" (UID: 
\"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.369241 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.374761 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.376295 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.397770 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.410358 4879 scope.go:117] "RemoveContainer" containerID="6642773a85fd1e63d7cbc8b00f01e2da008eebae79a871f85ba7190eed242ee9" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.421960 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvpj4\" (UniqueName: \"kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.438072 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.449648 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.816212 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:20 crc kubenswrapper[4879]: I1125 16:14:20.816519 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:14:21 crc kubenswrapper[4879]: I1125 16:14:21.063948 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:21 crc kubenswrapper[4879]: I1125 16:14:21.118503 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:21 crc kubenswrapper[4879]: W1125 16:14:21.123321 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76f6698a_0481_4369_be25_5d213c095b0b.slice/crio-45a39979f88455bce1209b0710d330ecd0b177ffc65f9296263d2ec60e0f2969 WatchSource:0}: Error finding container 45a39979f88455bce1209b0710d330ecd0b177ffc65f9296263d2ec60e0f2969: Status 404 returned error can't find the container with id 45a39979f88455bce1209b0710d330ecd0b177ffc65f9296263d2ec60e0f2969 Nov 25 16:14:21 crc kubenswrapper[4879]: I1125 16:14:21.659029 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a724b1f-c257-4848-b4c5-f63cea4928d9" path="/var/lib/kubelet/pods/3a724b1f-c257-4848-b4c5-f63cea4928d9/volumes" Nov 25 16:14:21 crc kubenswrapper[4879]: I1125 16:14:21.660697 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c" path="/var/lib/kubelet/pods/ad91d24d-ea26-4ecf-8e01-9c5c2dcab24c/volumes" Nov 25 16:14:21 crc kubenswrapper[4879]: I1125 16:14:21.935718 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-58tx7" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" probeResult="failure" output=< Nov 25 16:14:21 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:14:21 crc kubenswrapper[4879]: > Nov 25 16:14:22 crc kubenswrapper[4879]: I1125 16:14:22.054553 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerStarted","Data":"45a39979f88455bce1209b0710d330ecd0b177ffc65f9296263d2ec60e0f2969"} Nov 25 16:14:22 crc kubenswrapper[4879]: I1125 16:14:22.056344 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9b5edbf0-5410-4506-93b1-472e241e68c0","Type":"ContainerStarted","Data":"386454d0b3e9a1c2c64fe7b6c1f858ba55d8f28022fcc8e7dd560a5cbd6a1660"} Nov 25 16:14:22 crc kubenswrapper[4879]: I1125 16:14:22.084335 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=7.873341789 podStartE2EDuration="17.08431723s" podCreationTimestamp="2025-11-25 16:14:05 +0000 UTC" firstStartedPulling="2025-11-25 16:14:09.785764268 +0000 UTC m=+6541.389177339" lastFinishedPulling="2025-11-25 16:14:18.996739709 +0000 UTC m=+6550.600152780" observedRunningTime="2025-11-25 16:14:22.075480912 +0000 UTC m=+6553.678893983" watchObservedRunningTime="2025-11-25 16:14:22.08431723 +0000 UTC m=+6553.687730301" Nov 25 16:14:24 crc kubenswrapper[4879]: I1125 16:14:24.078279 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerStarted","Data":"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5"} Nov 25 16:14:24 crc kubenswrapper[4879]: I1125 16:14:24.645388 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:14:24 crc kubenswrapper[4879]: E1125 16:14:24.645920 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:14:25 crc kubenswrapper[4879]: I1125 16:14:25.196834 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:25 crc kubenswrapper[4879]: I1125 16:14:25.196897 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:26 crc kubenswrapper[4879]: I1125 16:14:26.249349 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-7x9sj" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="registry-server" probeResult="failure" output=< Nov 25 16:14:26 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:14:26 crc kubenswrapper[4879]: > Nov 25 16:14:26 crc kubenswrapper[4879]: I1125 16:14:26.408664 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 25 16:14:27 crc kubenswrapper[4879]: I1125 16:14:27.119519 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerStarted","Data":"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28"} Nov 25 16:14:27 crc kubenswrapper[4879]: I1125 16:14:27.975802 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Nov 25 16:14:28 crc kubenswrapper[4879]: I1125 16:14:28.131457 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerStarted","Data":"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a"} Nov 25 16:14:28 crc kubenswrapper[4879]: I1125 16:14:28.254668 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.165587 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerStarted","Data":"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6"} Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.165993 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-central-agent" containerID="cri-o://b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5" gracePeriod=30 Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.166195 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ceilometer-0" Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.166301 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="proxy-httpd" containerID="cri-o://9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6" gracePeriod=30 Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.166412 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="sg-core" containerID="cri-o://eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a" gracePeriod=30 Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.166460 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-notification-agent" containerID="cri-o://70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28" gracePeriod=30 Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.220903 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.288321587 podStartE2EDuration="11.220880113s" podCreationTimestamp="2025-11-25 16:14:20 +0000 UTC" firstStartedPulling="2025-11-25 16:14:21.12993742 +0000 UTC m=+6552.733350491" lastFinishedPulling="2025-11-25 16:14:30.062495956 +0000 UTC m=+6561.665909017" observedRunningTime="2025-11-25 16:14:31.198964568 +0000 UTC m=+6562.802377649" watchObservedRunningTime="2025-11-25 16:14:31.220880113 +0000 UTC m=+6562.824293184" Nov 25 16:14:31 crc kubenswrapper[4879]: I1125 16:14:31.866191 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-58tx7" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" probeResult="failure" output=< Nov 25 16:14:31 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:14:31 crc kubenswrapper[4879]: > Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177783 4879 generic.go:334] "Generic (PLEG): container finished" podID="76f6698a-0481-4369-be25-5d213c095b0b" containerID="9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6" exitCode=0 Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177842 4879 generic.go:334] "Generic (PLEG): container finished" podID="76f6698a-0481-4369-be25-5d213c095b0b" containerID="eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a" exitCode=2 Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177886 4879 generic.go:334] "Generic (PLEG): container finished" podID="76f6698a-0481-4369-be25-5d213c095b0b" containerID="70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28" exitCode=0 Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177885 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerDied","Data":"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6"} Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177956 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerDied","Data":"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a"} Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.177970 4879 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerDied","Data":"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28"} Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.738648 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862089 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvpj4\" (UniqueName: \"kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862264 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862319 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862462 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862606 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862648 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862697 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle\") pod \"76f6698a-0481-4369-be25-5d213c095b0b\" (UID: \"76f6698a-0481-4369-be25-5d213c095b0b\") " Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.862811 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.863241 4879 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.863695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.869113 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts" (OuterVolumeSpecName: "scripts") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.869433 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4" (OuterVolumeSpecName: "kube-api-access-jvpj4") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "kube-api-access-jvpj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.894045 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.959627 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.967263 4879 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.967306 4879 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.967322 4879 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76f6698a-0481-4369-be25-5d213c095b0b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.967338 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.967353 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvpj4\" (UniqueName: \"kubernetes.io/projected/76f6698a-0481-4369-be25-5d213c095b0b-kube-api-access-jvpj4\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:32 crc kubenswrapper[4879]: I1125 16:14:32.988402 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data" (OuterVolumeSpecName: "config-data") pod "76f6698a-0481-4369-be25-5d213c095b0b" (UID: "76f6698a-0481-4369-be25-5d213c095b0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.068793 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76f6698a-0481-4369-be25-5d213c095b0b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.192217 4879 generic.go:334] "Generic (PLEG): container finished" podID="76f6698a-0481-4369-be25-5d213c095b0b" containerID="b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5" exitCode=0 Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.192345 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.192339 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerDied","Data":"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5"} Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.193268 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76f6698a-0481-4369-be25-5d213c095b0b","Type":"ContainerDied","Data":"45a39979f88455bce1209b0710d330ecd0b177ffc65f9296263d2ec60e0f2969"} Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.193294 4879 scope.go:117] "RemoveContainer" containerID="9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.217971 4879 scope.go:117] "RemoveContainer" containerID="eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.229837 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.239817 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.241742 4879 scope.go:117] "RemoveContainer" containerID="70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.258813 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.259681 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-central-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.259813 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-central-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.259937 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-notification-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.260018 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-notification-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.260103 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="sg-core" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.260191 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="sg-core" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.260305 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="proxy-httpd" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.260380 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="proxy-httpd" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.260927 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-central-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.261051 4879 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="sg-core" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.261159 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="proxy-httpd" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.261274 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f6698a-0481-4369-be25-5d213c095b0b" containerName="ceilometer-notification-agent" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.264006 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.266551 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.269487 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274062 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-log-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274192 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-run-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274260 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49xtw\" (UniqueName: \"kubernetes.io/projected/3f132884-e184-47e7-8a5e-61299b10f83b-kube-api-access-49xtw\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274315 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-config-data\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274439 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.274486 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-scripts\") pod \"ceilometer-0\" (UID: 
\"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.275791 4879 scope.go:117] "RemoveContainer" containerID="b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.277269 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.354549 4879 scope.go:117] "RemoveContainer" containerID="9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.355059 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6\": container with ID starting with 9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6 not found: ID does not exist" containerID="9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355091 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6"} err="failed to get container status \"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6\": rpc error: code = NotFound desc = could not find container \"9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6\": container with ID starting with 9b9c6a94a837877c7db40a90f026f4c52f3907b1872ab7ffba5de0e163ddc1d6 not found: ID does not exist" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355112 4879 scope.go:117] "RemoveContainer" containerID="eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.355496 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a\": container with ID starting with eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a not found: ID does not exist" containerID="eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355516 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a"} err="failed to get container status \"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a\": rpc error: code = NotFound desc = could not find container \"eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a\": container with ID starting with eee527307426af89e2dc7503c99ffc4c1a7bb7ea63fd0cc533ee067fdb24a71a not found: ID does not exist" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355531 4879 scope.go:117] "RemoveContainer" containerID="70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.355708 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28\": container with ID starting with 70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28 not found: ID does not exist" containerID="70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28" Nov 25 16:14:33 
crc kubenswrapper[4879]: I1125 16:14:33.355728 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28"} err="failed to get container status \"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28\": rpc error: code = NotFound desc = could not find container \"70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28\": container with ID starting with 70cea33edefc0c52891418ab2968f2fa9f9d9c7580a1cce1452f848d5759db28 not found: ID does not exist" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355740 4879 scope.go:117] "RemoveContainer" containerID="b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5" Nov 25 16:14:33 crc kubenswrapper[4879]: E1125 16:14:33.355909 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5\": container with ID starting with b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5 not found: ID does not exist" containerID="b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.355929 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5"} err="failed to get container status \"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5\": rpc error: code = NotFound desc = could not find container \"b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5\": container with ID starting with b7ff9562a6a2041756335809d49dabf138c37ac1a9116bca7e915fe9504aeda5 not found: ID does not exist" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376459 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-run-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376530 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49xtw\" (UniqueName: \"kubernetes.io/projected/3f132884-e184-47e7-8a5e-61299b10f83b-kube-api-access-49xtw\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376585 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-config-data\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376644 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376679 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376709 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-scripts\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.376803 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-log-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.377166 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-run-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.377217 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f132884-e184-47e7-8a5e-61299b10f83b-log-httpd\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.381040 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.381314 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-config-data\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.381367 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-scripts\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.382232 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f132884-e184-47e7-8a5e-61299b10f83b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.392611 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49xtw\" (UniqueName: \"kubernetes.io/projected/3f132884-e184-47e7-8a5e-61299b10f83b-kube-api-access-49xtw\") pod \"ceilometer-0\" (UID: \"3f132884-e184-47e7-8a5e-61299b10f83b\") " pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.587109 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 25 16:14:33 crc kubenswrapper[4879]: I1125 16:14:33.657514 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76f6698a-0481-4369-be25-5d213c095b0b" path="/var/lib/kubelet/pods/76f6698a-0481-4369-be25-5d213c095b0b/volumes" Nov 25 16:14:34 crc kubenswrapper[4879]: I1125 16:14:34.120472 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 25 16:14:34 crc kubenswrapper[4879]: I1125 16:14:34.212399 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f132884-e184-47e7-8a5e-61299b10f83b","Type":"ContainerStarted","Data":"ea70e7fad985b2ac789a5a648cd4f85cf3a74eedc8abf0bb37a1496ab2a5f5f0"} Nov 25 16:14:35 crc kubenswrapper[4879]: I1125 16:14:35.255857 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:35 crc kubenswrapper[4879]: I1125 16:14:35.309155 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:36 crc kubenswrapper[4879]: I1125 16:14:36.071660 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:36 crc kubenswrapper[4879]: I1125 16:14:36.244868 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f132884-e184-47e7-8a5e-61299b10f83b","Type":"ContainerStarted","Data":"aff1ed3dd9ff89e56c52d708007280b8d8aa42cd07a2ed0ead59380b6093092c"} Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.257307 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f132884-e184-47e7-8a5e-61299b10f83b","Type":"ContainerStarted","Data":"c0df213c7c1621b23c2b747d84b523bdf5d60435edf66bc8d9eecbcea5d13cb8"} Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.257468 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7x9sj" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="registry-server" containerID="cri-o://b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b" gracePeriod=2 Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.783838 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.882383 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6wv9\" (UniqueName: \"kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9\") pod \"142b3996-a07f-4236-86df-adcff69ce6a4\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.882558 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content\") pod \"142b3996-a07f-4236-86df-adcff69ce6a4\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.882646 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities\") pod \"142b3996-a07f-4236-86df-adcff69ce6a4\" (UID: \"142b3996-a07f-4236-86df-adcff69ce6a4\") " Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.883307 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities" (OuterVolumeSpecName: "utilities") pod "142b3996-a07f-4236-86df-adcff69ce6a4" (UID: "142b3996-a07f-4236-86df-adcff69ce6a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.890102 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9" (OuterVolumeSpecName: "kube-api-access-l6wv9") pod "142b3996-a07f-4236-86df-adcff69ce6a4" (UID: "142b3996-a07f-4236-86df-adcff69ce6a4"). InnerVolumeSpecName "kube-api-access-l6wv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.985495 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:37 crc kubenswrapper[4879]: I1125 16:14:37.985527 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6wv9\" (UniqueName: \"kubernetes.io/projected/142b3996-a07f-4236-86df-adcff69ce6a4-kube-api-access-l6wv9\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.044247 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.078793 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "142b3996-a07f-4236-86df-adcff69ce6a4" (UID: "142b3996-a07f-4236-86df-adcff69ce6a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.087207 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/142b3996-a07f-4236-86df-adcff69ce6a4-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.272544 4879 generic.go:334] "Generic (PLEG): container finished" podID="142b3996-a07f-4236-86df-adcff69ce6a4" containerID="b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b" exitCode=0 Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.272585 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerDied","Data":"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b"} Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.272619 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x9sj" event={"ID":"142b3996-a07f-4236-86df-adcff69ce6a4","Type":"ContainerDied","Data":"f2797087672869f664fa71ab92ed1a428212fb8e6c422e8d58e54119ed255ef1"} Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.272639 4879 scope.go:117] "RemoveContainer" containerID="b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.272750 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x9sj" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.315191 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.333500 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x9sj"] Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.409196 4879 scope.go:117] "RemoveContainer" containerID="568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.645754 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:14:38 crc kubenswrapper[4879]: E1125 16:14:38.646240 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.900872 4879 scope.go:117] "RemoveContainer" containerID="7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.960111 4879 scope.go:117] "RemoveContainer" containerID="b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b" Nov 25 16:14:38 crc kubenswrapper[4879]: E1125 16:14:38.961348 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b\": container with ID starting with b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b not found: ID does not exist" 
containerID="b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.961394 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b"} err="failed to get container status \"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b\": rpc error: code = NotFound desc = could not find container \"b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b\": container with ID starting with b1ab99f577c377da89cbf05116e95b470c3a3bc3978ff8c1d67e588cac12e27b not found: ID does not exist" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.961425 4879 scope.go:117] "RemoveContainer" containerID="568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2" Nov 25 16:14:38 crc kubenswrapper[4879]: E1125 16:14:38.961837 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2\": container with ID starting with 568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2 not found: ID does not exist" containerID="568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.961889 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2"} err="failed to get container status \"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2\": rpc error: code = NotFound desc = could not find container \"568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2\": container with ID starting with 568434d1b3f904e1019d7ebc315d8c28c1f8837d3361d09f22d03470620407f2 not found: ID does not exist" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.961923 4879 scope.go:117] "RemoveContainer" containerID="7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601" Nov 25 16:14:38 crc kubenswrapper[4879]: E1125 16:14:38.962265 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601\": container with ID starting with 7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601 not found: ID does not exist" containerID="7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601" Nov 25 16:14:38 crc kubenswrapper[4879]: I1125 16:14:38.962298 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601"} err="failed to get container status \"7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601\": rpc error: code = NotFound desc = could not find container \"7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601\": container with ID starting with 7f5407cc7d073b9774c3d44b8bce3f8c67bd13c899f652208df18c52b5db6601 not found: ID does not exist" Nov 25 16:14:39 crc kubenswrapper[4879]: I1125 16:14:39.657378 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" path="/var/lib/kubelet/pods/142b3996-a07f-4236-86df-adcff69ce6a4/volumes" Nov 25 16:14:40 crc kubenswrapper[4879]: I1125 16:14:40.294178 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3f132884-e184-47e7-8a5e-61299b10f83b","Type":"ContainerStarted","Data":"3d1b4a2d262721d8525e82cac6143a5f9d3cb949175edba028c46bcfad4a51dd"} Nov 25 16:14:41 crc kubenswrapper[4879]: I1125 16:14:41.864140 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-58tx7" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" probeResult="failure" output=< Nov 25 16:14:41 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:14:41 crc kubenswrapper[4879]: > Nov 25 16:14:42 crc kubenswrapper[4879]: I1125 16:14:42.322977 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f132884-e184-47e7-8a5e-61299b10f83b","Type":"ContainerStarted","Data":"5714554d47d73c21dd1a84d6aa3720903b06075e7ac187a58561416e767ca7f2"} Nov 25 16:14:42 crc kubenswrapper[4879]: I1125 16:14:42.324937 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 25 16:14:42 crc kubenswrapper[4879]: I1125 16:14:42.369494 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.283291529 podStartE2EDuration="9.369472806s" podCreationTimestamp="2025-11-25 16:14:33 +0000 UTC" firstStartedPulling="2025-11-25 16:14:34.134525322 +0000 UTC m=+6565.737938393" lastFinishedPulling="2025-11-25 16:14:41.220706599 +0000 UTC m=+6572.824119670" observedRunningTime="2025-11-25 16:14:42.353103907 +0000 UTC m=+6573.956516988" watchObservedRunningTime="2025-11-25 16:14:42.369472806 +0000 UTC m=+6573.972885877" Nov 25 16:14:50 crc kubenswrapper[4879]: I1125 16:14:50.646014 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:14:50 crc kubenswrapper[4879]: E1125 16:14:50.646876 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:14:51 crc kubenswrapper[4879]: I1125 16:14:51.867545 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-58tx7" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" probeResult="failure" output=< Nov 25 16:14:51 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:14:51 crc kubenswrapper[4879]: > Nov 25 16:14:54 crc kubenswrapper[4879]: I1125 16:14:54.594860 4879 scope.go:117] "RemoveContainer" containerID="a6c1239f0ad03c18ac75cef116e46f0dfe3ef469ca43285b89867b532bc3a01a" Nov 25 16:14:54 crc kubenswrapper[4879]: I1125 16:14:54.650789 4879 scope.go:117] "RemoveContainer" containerID="5500138fde5179c471ebd7478af32d39c41c99d99ef3ef34e8eba40824e9c8fd" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.182241 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"] Nov 25 16:15:00 crc kubenswrapper[4879]: E1125 16:15:00.183356 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="registry-server" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.183376 4879 
Nov 25 16:15:00 crc kubenswrapper[4879]: E1125 16:15:00.183402 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="extract-utilities" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.183412 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="extract-utilities" Nov 25 16:15:00 crc kubenswrapper[4879]: E1125 16:15:00.183467 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="extract-content" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.183478 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="extract-content" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.183741 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="142b3996-a07f-4236-86df-adcff69ce6a4" containerName="registry-server" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.184677 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.188417 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.191904 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.196380 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"] Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.271951 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.272496 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.272634 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k92l\" (UniqueName: \"kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.374108 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume\") pod
\"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.374374 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.374441 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k92l\" (UniqueName: \"kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.375503 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.380680 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.392383 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k92l\" (UniqueName: \"kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l\") pod \"collect-profiles-29401455-grdjz\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.504333 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.869793 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.921217 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:15:00 crc kubenswrapper[4879]: I1125 16:15:00.984831 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"] Nov 25 16:15:00 crc kubenswrapper[4879]: W1125 16:15:00.988448 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4f899e7_0e42_4f2b_878b_cf2c502f6bf4.slice/crio-55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5 WatchSource:0}: Error finding container 55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5: Status 404 returned error can't find the container with id 55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5 Nov 25 16:15:01 crc kubenswrapper[4879]: I1125 16:15:01.508220 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" event={"ID":"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4","Type":"ContainerStarted","Data":"882549109ed4c91ea920d51d52f54a9ff25e356295f39b079a3521b97fa3e255"} Nov 25 16:15:01 crc kubenswrapper[4879]: I1125 16:15:01.508534 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" event={"ID":"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4","Type":"ContainerStarted","Data":"55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5"} Nov 25 16:15:01 crc kubenswrapper[4879]: I1125 16:15:01.536823 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" podStartSLOduration=1.536798073 podStartE2EDuration="1.536798073s" podCreationTimestamp="2025-11-25 16:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:15:01.522151271 +0000 UTC m=+6593.125564362" watchObservedRunningTime="2025-11-25 16:15:01.536798073 +0000 UTC m=+6593.140211144" Nov 25 16:15:01 crc kubenswrapper[4879]: I1125 16:15:01.711660 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:15:02 crc kubenswrapper[4879]: I1125 16:15:02.518314 4879 generic.go:334] "Generic (PLEG): container finished" podID="a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" containerID="882549109ed4c91ea920d51d52f54a9ff25e356295f39b079a3521b97fa3e255" exitCode=0 Nov 25 16:15:02 crc kubenswrapper[4879]: I1125 16:15:02.518374 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" event={"ID":"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4","Type":"ContainerDied","Data":"882549109ed4c91ea920d51d52f54a9ff25e356295f39b079a3521b97fa3e255"} Nov 25 16:15:02 crc kubenswrapper[4879]: I1125 16:15:02.518587 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-58tx7" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" 
containerID="cri-o://97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b" gracePeriod=2 Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.152252 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.235488 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content\") pod \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.235706 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5x4v\" (UniqueName: \"kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v\") pod \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.235862 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities\") pod \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\" (UID: \"f6873e0d-214a-4c7a-8a23-e91745ff04f1\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.236914 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities" (OuterVolumeSpecName: "utilities") pod "f6873e0d-214a-4c7a-8a23-e91745ff04f1" (UID: "f6873e0d-214a-4c7a-8a23-e91745ff04f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.241725 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v" (OuterVolumeSpecName: "kube-api-access-s5x4v") pod "f6873e0d-214a-4c7a-8a23-e91745ff04f1" (UID: "f6873e0d-214a-4c7a-8a23-e91745ff04f1"). InnerVolumeSpecName "kube-api-access-s5x4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.287780 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f6873e0d-214a-4c7a-8a23-e91745ff04f1" (UID: "f6873e0d-214a-4c7a-8a23-e91745ff04f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.338154 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5x4v\" (UniqueName: \"kubernetes.io/projected/f6873e0d-214a-4c7a-8a23-e91745ff04f1-kube-api-access-s5x4v\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.338195 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.338208 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6873e0d-214a-4c7a-8a23-e91745ff04f1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.530473 4879 generic.go:334] "Generic (PLEG): container finished" podID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerID="97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b" exitCode=0 Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.530525 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerDied","Data":"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b"} Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.530851 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-58tx7" event={"ID":"f6873e0d-214a-4c7a-8a23-e91745ff04f1","Type":"ContainerDied","Data":"c5767a383863f47e3922f86739ed1d6a56092c216a76a432e01a8d34d124a48b"} Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.530879 4879 scope.go:117] "RemoveContainer" containerID="97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.530551 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-58tx7" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.573617 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.581149 4879 scope.go:117] "RemoveContainer" containerID="6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.583358 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-58tx7"] Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.649725 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:15:03 crc kubenswrapper[4879]: E1125 16:15:03.650003 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.658527 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" path="/var/lib/kubelet/pods/f6873e0d-214a-4c7a-8a23-e91745ff04f1/volumes" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.700590 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.718333 4879 scope.go:117] "RemoveContainer" containerID="db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.774466 4879 scope.go:117] "RemoveContainer" containerID="97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b" Nov 25 16:15:03 crc kubenswrapper[4879]: E1125 16:15:03.775249 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b\": container with ID starting with 97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b not found: ID does not exist" containerID="97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.775311 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b"} err="failed to get container status \"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b\": rpc error: code = NotFound desc = could not find container \"97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b\": container with ID starting with 97bd7e77e40d25e8a37c91c59002a315cb7713c0502361df240d63becd59577b not found: ID does not exist" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.775341 4879 scope.go:117] "RemoveContainer" containerID="6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a" Nov 25 16:15:03 crc kubenswrapper[4879]: E1125 16:15:03.775793 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a\": 
container with ID starting with 6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a not found: ID does not exist" containerID="6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.775841 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a"} err="failed to get container status \"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a\": rpc error: code = NotFound desc = could not find container \"6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a\": container with ID starting with 6c7ff4f29585c7e154d1a62314fd4d1eb02ced6ca634bb0a1ea7d22550c8ea7a not found: ID does not exist" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.775876 4879 scope.go:117] "RemoveContainer" containerID="db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f" Nov 25 16:15:03 crc kubenswrapper[4879]: E1125 16:15:03.776190 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f\": container with ID starting with db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f not found: ID does not exist" containerID="db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.776233 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f"} err="failed to get container status \"db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f\": rpc error: code = NotFound desc = could not find container \"db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f\": container with ID starting with db5d486f502087659bf17d10b0d11e746245e430b67e0289057ecaa87a62542f not found: ID does not exist" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.910064 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"
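
[Editor's note] Each RemoveContainer above triggers a NotFound pair from log.go and pod_container_deletor.go: the container is already gone by the time the status lookup runs, and cleanup simply continues. The same idempotent-delete pattern in miniature; remove_container() and NotFoundError are hypothetical stand-ins for a runtime client, not the CRI API:

class NotFoundError(Exception):
    """Stand-in for a runtime 'code = NotFound' RPC error."""

def remove_idempotent(runtime, container_id):
    # For garbage collection, "already deleted" is as good as "deleted":
    # swallow NotFound and only propagate real runtime failures.
    try:
        runtime.remove_container(container_id)
    except NotFoundError:
        print(f"{container_id[:12]}: not found, treating as already removed")
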
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.953205 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k92l\" (UniqueName: \"kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l\") pod \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.953410 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume\") pod \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.953447 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume\") pod \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\" (UID: \"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4\") " Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.954199 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume" (OuterVolumeSpecName: "config-volume") pod "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" (UID: "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.959137 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" (UID: "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:15:03 crc kubenswrapper[4879]: I1125 16:15:03.959523 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l" (OuterVolumeSpecName: "kube-api-access-2k92l") pod "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" (UID: "a4f899e7-0e42-4f2b-878b-cf2c502f6bf4"). InnerVolumeSpecName "kube-api-access-2k92l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.056551 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.056601 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.056612 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k92l\" (UniqueName: \"kubernetes.io/projected/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4-kube-api-access-2k92l\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.546191 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" event={"ID":"a4f899e7-0e42-4f2b-878b-cf2c502f6bf4","Type":"ContainerDied","Data":"55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5"} Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.546231 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55e789483d90fe5aa80197efd39e4a306dc090ccca66548f14258169be5e76f5" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.546282 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz" Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.657975 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh"] Nov 25 16:15:04 crc kubenswrapper[4879]: I1125 16:15:04.667793 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401410-vxknh"] Nov 25 16:15:05 crc kubenswrapper[4879]: I1125 16:15:05.658661 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f143b190-3df1-4505-aaf0-576be693a9ff" path="/var/lib/kubelet/pods/f143b190-3df1-4505-aaf0-576be693a9ff/volumes" Nov 25 16:15:16 crc kubenswrapper[4879]: I1125 16:15:16.644992 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:15:16 crc kubenswrapper[4879]: E1125 16:15:16.645946 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:15:28 crc kubenswrapper[4879]: I1125 16:15:28.645157 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:15:28 crc kubenswrapper[4879]: E1125 16:15:28.646018 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.279776 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:29 crc kubenswrapper[4879]: E1125 16:15:29.280336 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="extract-content" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280366 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="extract-content" Nov 25 16:15:29 crc kubenswrapper[4879]: E1125 16:15:29.280384 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" containerName="collect-profiles" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280393 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" containerName="collect-profiles" Nov 25 16:15:29 crc kubenswrapper[4879]: E1125 16:15:29.280424 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280431 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" Nov 25 16:15:29 crc kubenswrapper[4879]: E1125 16:15:29.280451 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="extract-utilities" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280461 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="extract-utilities" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280778 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6873e0d-214a-4c7a-8a23-e91745ff04f1" containerName="registry-server" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.280798 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" containerName="collect-profiles" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.282256 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.287295 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.299348 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340071 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340115 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfk4n\" (UniqueName: \"kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340212 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340265 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340308 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.340335 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.442439 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.442776 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfk4n\" (UniqueName: \"kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: 
\"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.442956 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443117 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443285 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443394 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443402 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443957 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.443997 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.444072 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.445646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: 
I1125 16:15:29.460643 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfk4n\" (UniqueName: \"kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n\") pod \"dnsmasq-dns-57cdb84995-crssv\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:29 crc kubenswrapper[4879]: I1125 16:15:29.601216 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:30 crc kubenswrapper[4879]: I1125 16:15:30.079934 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:30 crc kubenswrapper[4879]: I1125 16:15:30.846964 4879 generic.go:334] "Generic (PLEG): container finished" podID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerID="bf2ae589c2ec7df5ad754e8d26254e1ab6207e10a2ab4555e79a7b99090780a7" exitCode=0 Nov 25 16:15:30 crc kubenswrapper[4879]: I1125 16:15:30.847300 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57cdb84995-crssv" event={"ID":"943e7dfb-fd1d-4898-b3b1-11c4622f2c71","Type":"ContainerDied","Data":"bf2ae589c2ec7df5ad754e8d26254e1ab6207e10a2ab4555e79a7b99090780a7"} Nov 25 16:15:30 crc kubenswrapper[4879]: I1125 16:15:30.847350 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57cdb84995-crssv" event={"ID":"943e7dfb-fd1d-4898-b3b1-11c4622f2c71","Type":"ContainerStarted","Data":"141630cce86c0ebae0555364a66c3e9889124851769dbb89d25fe6f502c8c53b"} Nov 25 16:15:31 crc kubenswrapper[4879]: I1125 16:15:31.858481 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57cdb84995-crssv" event={"ID":"943e7dfb-fd1d-4898-b3b1-11c4622f2c71","Type":"ContainerStarted","Data":"7f22627a4e5228ed81c901cd563c85534df3d1e7d3ead6e5c731068c9e790a74"} Nov 25 16:15:31 crc kubenswrapper[4879]: I1125 16:15:31.858854 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:31 crc kubenswrapper[4879]: I1125 16:15:31.881430 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57cdb84995-crssv" podStartSLOduration=2.881414167 podStartE2EDuration="2.881414167s" podCreationTimestamp="2025-11-25 16:15:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:15:31.878793323 +0000 UTC m=+6623.482206394" watchObservedRunningTime="2025-11-25 16:15:31.881414167 +0000 UTC m=+6623.484827228" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.603394 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.673952 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:15:39 crc kubenswrapper[4879]: E1125 16:15:39.679088 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 
16:15:39.700700 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.701259 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-547577b957-5mvx2" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="dnsmasq-dns" containerID="cri-o://3c119bc159edb7d6f16d1c67b5b8f49cd25d24e955c260b1b90d9f09501d5d71" gracePeriod=10 Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.859598 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c8c87dd7f-pftfx"] Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.867995 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.876538 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c8c87dd7f-pftfx"] Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.945939 4879 generic.go:334] "Generic (PLEG): container finished" podID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerID="3c119bc159edb7d6f16d1c67b5b8f49cd25d24e955c260b1b90d9f09501d5d71" exitCode=0 Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.945991 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547577b957-5mvx2" event={"ID":"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2","Type":"ContainerDied","Data":"3c119bc159edb7d6f16d1c67b5b8f49cd25d24e955c260b1b90d9f09501d5d71"} Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976529 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jhw2\" (UniqueName: \"kubernetes.io/projected/1e038d40-0bea-40b2-ad72-c77384d9a39e-kube-api-access-7jhw2\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976619 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-dns-svc\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976666 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-openstack-cell1\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976691 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-nb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976715 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-sb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " 
pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:39 crc kubenswrapper[4879]: I1125 16:15:39.976754 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-config\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.079443 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-openstack-cell1\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.079773 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-nb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.079808 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-sb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.079849 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-config\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.080027 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jhw2\" (UniqueName: \"kubernetes.io/projected/1e038d40-0bea-40b2-ad72-c77384d9a39e-kube-api-access-7jhw2\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.080086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-dns-svc\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.080452 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-openstack-cell1\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.081031 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-dns-svc\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.081142 
4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-sb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.081907 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-config\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.081991 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e038d40-0bea-40b2-ad72-c77384d9a39e-ovsdbserver-nb\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.101763 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jhw2\" (UniqueName: \"kubernetes.io/projected/1e038d40-0bea-40b2-ad72-c77384d9a39e-kube-api-access-7jhw2\") pod \"dnsmasq-dns-c8c87dd7f-pftfx\" (UID: \"1e038d40-0bea-40b2-ad72-c77384d9a39e\") " pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.199403 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.334723 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.386615 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb\") pod \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.386728 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc\") pod \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.386838 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb\") pod \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.386878 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msb5k\" (UniqueName: \"kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k\") pod \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\" (UID: \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.386895 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config\") pod \"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\" (UID: 
\"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2\") " Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.397734 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k" (OuterVolumeSpecName: "kube-api-access-msb5k") pod "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" (UID: "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2"). InnerVolumeSpecName "kube-api-access-msb5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.443532 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" (UID: "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.451823 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config" (OuterVolumeSpecName: "config") pod "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" (UID: "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.454905 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" (UID: "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.465024 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" (UID: "027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.491090 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.491164 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.491180 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msb5k\" (UniqueName: \"kubernetes.io/projected/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-kube-api-access-msb5k\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.491193 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.491201 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.691061 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c8c87dd7f-pftfx"] Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.961295 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-547577b957-5mvx2" event={"ID":"027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2","Type":"ContainerDied","Data":"90f51d7443d7dec8fb5b8e137fbbcf2fbf5f1a4527602da74c3fc7f089ec1ae4"} Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.961656 4879 scope.go:117] "RemoveContainer" containerID="3c119bc159edb7d6f16d1c67b5b8f49cd25d24e955c260b1b90d9f09501d5d71" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.961828 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-547577b957-5mvx2" Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.971882 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" event={"ID":"1e038d40-0bea-40b2-ad72-c77384d9a39e","Type":"ContainerStarted","Data":"044e4a45cde645cb3978fbc45e4cbd116a98dd65392c337ab9aa60ea37201b20"} Nov 25 16:15:40 crc kubenswrapper[4879]: I1125 16:15:40.971917 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" event={"ID":"1e038d40-0bea-40b2-ad72-c77384d9a39e","Type":"ContainerStarted","Data":"5b77d776934e9bf8fd49a39924a706ee9b9ea40deec3a106e9758ada55032d4d"} Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.060113 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.061074 4879 scope.go:117] "RemoveContainer" containerID="a9dc63bd8c2c75762eaba481fde100256208f48befc08719e5f2d9d19a8f3ea8" Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.068671 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-547577b957-5mvx2"] Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.656215 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" path="/var/lib/kubelet/pods/027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2/volumes" Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.985899 4879 generic.go:334] "Generic (PLEG): container finished" podID="1e038d40-0bea-40b2-ad72-c77384d9a39e" containerID="044e4a45cde645cb3978fbc45e4cbd116a98dd65392c337ab9aa60ea37201b20" exitCode=0 Nov 25 16:15:41 crc kubenswrapper[4879]: I1125 16:15:41.985978 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" event={"ID":"1e038d40-0bea-40b2-ad72-c77384d9a39e","Type":"ContainerDied","Data":"044e4a45cde645cb3978fbc45e4cbd116a98dd65392c337ab9aa60ea37201b20"} Nov 25 16:15:42 crc kubenswrapper[4879]: I1125 16:15:42.998402 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" event={"ID":"1e038d40-0bea-40b2-ad72-c77384d9a39e","Type":"ContainerStarted","Data":"b98b90dfe238bf580a7647ff32168d1e0a7dca3c4c3a1f6f7bd3ad52e6f5d7a3"} Nov 25 16:15:42 crc kubenswrapper[4879]: I1125 16:15:42.998965 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:43 crc kubenswrapper[4879]: I1125 16:15:43.016164 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" podStartSLOduration=4.016142182 podStartE2EDuration="4.016142182s" podCreationTimestamp="2025-11-25 16:15:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:15:43.013787546 +0000 UTC m=+6634.617200617" watchObservedRunningTime="2025-11-25 16:15:43.016142182 +0000 UTC m=+6634.619555253" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.524748 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg"] Nov 25 16:15:45 crc kubenswrapper[4879]: E1125 16:15:45.525469 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="init" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.525482 4879 
state_mem.go:107] "Deleted CPUSet assignment" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="init" Nov 25 16:15:45 crc kubenswrapper[4879]: E1125 16:15:45.525492 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="dnsmasq-dns" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.525498 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="dnsmasq-dns" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.525715 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="027cbd6e-f6f6-4ae6-abf5-bf3a1184cea2" containerName="dnsmasq-dns" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.526638 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.530231 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.530338 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.530486 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.532048 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.539321 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg"] Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.596378 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75ng8\" (UniqueName: \"kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.596437 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.596533 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.596835 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.597238 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.699216 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.699351 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.699383 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75ng8\" (UniqueName: \"kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.699403 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.699437 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.704960 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.705487 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.706777 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.715323 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.715520 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75ng8\" (UniqueName: \"kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:45 crc kubenswrapper[4879]: I1125 16:15:45.853698 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:15:46 crc kubenswrapper[4879]: I1125 16:15:46.364555 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg"] Nov 25 16:15:46 crc kubenswrapper[4879]: I1125 16:15:46.373329 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:15:47 crc kubenswrapper[4879]: I1125 16:15:47.049897 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" event={"ID":"a31b4be8-7620-4231-a877-f2755303c565","Type":"ContainerStarted","Data":"ef7a9343570886109fe77788467e7e30b0a0fc1123186e1d2ce263e426e9fb0d"} Nov 25 16:15:50 crc kubenswrapper[4879]: I1125 16:15:50.201503 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c8c87dd7f-pftfx" Nov 25 16:15:50 crc kubenswrapper[4879]: I1125 16:15:50.268100 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:50 crc kubenswrapper[4879]: I1125 16:15:50.268462 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57cdb84995-crssv" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="dnsmasq-dns" containerID="cri-o://7f22627a4e5228ed81c901cd563c85534df3d1e7d3ead6e5c731068c9e790a74" gracePeriod=10 Nov 25 16:15:51 crc kubenswrapper[4879]: I1125 16:15:51.094839 4879 generic.go:334] "Generic (PLEG): container finished" podID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerID="7f22627a4e5228ed81c901cd563c85534df3d1e7d3ead6e5c731068c9e790a74" exitCode=0 Nov 25 16:15:51 crc kubenswrapper[4879]: I1125 16:15:51.095249 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57cdb84995-crssv" event={"ID":"943e7dfb-fd1d-4898-b3b1-11c4622f2c71","Type":"ContainerDied","Data":"7f22627a4e5228ed81c901cd563c85534df3d1e7d3ead6e5c731068c9e790a74"} Nov 25 16:15:51 crc kubenswrapper[4879]: I1125 16:15:51.644937 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:15:51 crc kubenswrapper[4879]: E1125 16:15:51.645209 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:15:54 crc kubenswrapper[4879]: I1125 16:15:54.602511 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57cdb84995-crssv" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.157:5353: connect: connection refused" Nov 25 16:15:54 crc kubenswrapper[4879]: I1125 16:15:54.907369 4879 scope.go:117] "RemoveContainer" containerID="5bab8a4368329fc30e4d3e883a2e672eeed965fb7d4f5d4750e2170887ee4340" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.080698 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132388 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132469 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132581 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132606 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132689 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.132738 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfk4n\" (UniqueName: \"kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n\") pod \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\" (UID: \"943e7dfb-fd1d-4898-b3b1-11c4622f2c71\") " Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.140358 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n" (OuterVolumeSpecName: "kube-api-access-mfk4n") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "kube-api-access-mfk4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.162453 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57cdb84995-crssv" event={"ID":"943e7dfb-fd1d-4898-b3b1-11c4622f2c71","Type":"ContainerDied","Data":"141630cce86c0ebae0555364a66c3e9889124851769dbb89d25fe6f502c8c53b"} Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.162513 4879 scope.go:117] "RemoveContainer" containerID="7f22627a4e5228ed81c901cd563c85534df3d1e7d3ead6e5c731068c9e790a74" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.162659 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57cdb84995-crssv" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.167950 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" event={"ID":"a31b4be8-7620-4231-a877-f2755303c565","Type":"ContainerStarted","Data":"522ab2d872989b60cf5c17582217b8cdbc6473991eadf0f356c67eb86548d47f"} Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.187844 4879 scope.go:117] "RemoveContainer" containerID="bf2ae589c2ec7df5ad754e8d26254e1ab6207e10a2ab4555e79a7b99090780a7" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.194945 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" podStartSLOduration=1.7178734740000001 podStartE2EDuration="11.194925043s" podCreationTimestamp="2025-11-25 16:15:45 +0000 UTC" firstStartedPulling="2025-11-25 16:15:46.373151367 +0000 UTC m=+6637.976564438" lastFinishedPulling="2025-11-25 16:15:55.850202936 +0000 UTC m=+6647.453616007" observedRunningTime="2025-11-25 16:15:56.188309718 +0000 UTC m=+6647.791722789" watchObservedRunningTime="2025-11-25 16:15:56.194925043 +0000 UTC m=+6647.798338114" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.205147 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config" (OuterVolumeSpecName: "config") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.209877 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.210476 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.211234 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.211560 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "943e7dfb-fd1d-4898-b3b1-11c4622f2c71" (UID: "943e7dfb-fd1d-4898-b3b1-11c4622f2c71"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234082 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234139 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfk4n\" (UniqueName: \"kubernetes.io/projected/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-kube-api-access-mfk4n\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234156 4879 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234168 4879 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-config\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234180 4879 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.234190 4879 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/943e7dfb-fd1d-4898-b3b1-11c4622f2c71-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.502775 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:56 crc kubenswrapper[4879]: I1125 16:15:56.511817 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57cdb84995-crssv"] Nov 25 16:15:57 crc kubenswrapper[4879]: I1125 16:15:57.656234 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" path="/var/lib/kubelet/pods/943e7dfb-fd1d-4898-b3b1-11c4622f2c71/volumes" Nov 25 16:16:04 crc kubenswrapper[4879]: I1125 16:16:04.645795 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:16:04 crc kubenswrapper[4879]: E1125 16:16:04.646789 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:16:09 crc kubenswrapper[4879]: I1125 16:16:09.309752 4879 generic.go:334] "Generic (PLEG): container finished" podID="a31b4be8-7620-4231-a877-f2755303c565" containerID="522ab2d872989b60cf5c17582217b8cdbc6473991eadf0f356c67eb86548d47f" exitCode=0 Nov 25 16:16:09 crc kubenswrapper[4879]: I1125 16:16:09.309829 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" event={"ID":"a31b4be8-7620-4231-a877-f2755303c565","Type":"ContainerDied","Data":"522ab2d872989b60cf5c17582217b8cdbc6473991eadf0f356c67eb86548d47f"} Nov 25 16:16:10 crc kubenswrapper[4879]: I1125 
16:16:10.940559 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.033605 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory\") pod \"a31b4be8-7620-4231-a877-f2755303c565\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.034018 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75ng8\" (UniqueName: \"kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8\") pod \"a31b4be8-7620-4231-a877-f2755303c565\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.034048 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph\") pod \"a31b4be8-7620-4231-a877-f2755303c565\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.034088 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key\") pod \"a31b4be8-7620-4231-a877-f2755303c565\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.034296 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle\") pod \"a31b4be8-7620-4231-a877-f2755303c565\" (UID: \"a31b4be8-7620-4231-a877-f2755303c565\") " Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.039618 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8" (OuterVolumeSpecName: "kube-api-access-75ng8") pod "a31b4be8-7620-4231-a877-f2755303c565" (UID: "a31b4be8-7620-4231-a877-f2755303c565"). InnerVolumeSpecName "kube-api-access-75ng8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.039639 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "a31b4be8-7620-4231-a877-f2755303c565" (UID: "a31b4be8-7620-4231-a877-f2755303c565"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.039784 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph" (OuterVolumeSpecName: "ceph") pod "a31b4be8-7620-4231-a877-f2755303c565" (UID: "a31b4be8-7620-4231-a877-f2755303c565"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.064760 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory" (OuterVolumeSpecName: "inventory") pod "a31b4be8-7620-4231-a877-f2755303c565" (UID: "a31b4be8-7620-4231-a877-f2755303c565"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.070489 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a31b4be8-7620-4231-a877-f2755303c565" (UID: "a31b4be8-7620-4231-a877-f2755303c565"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.136145 4879 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.136191 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.136205 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75ng8\" (UniqueName: \"kubernetes.io/projected/a31b4be8-7620-4231-a877-f2755303c565-kube-api-access-75ng8\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.136315 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.136324 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a31b4be8-7620-4231-a877-f2755303c565-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.335664 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" event={"ID":"a31b4be8-7620-4231-a877-f2755303c565","Type":"ContainerDied","Data":"ef7a9343570886109fe77788467e7e30b0a0fc1123186e1d2ce263e426e9fb0d"} Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.335713 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef7a9343570886109fe77788467e7e30b0a0fc1123186e1d2ce263e426e9fb0d" Nov 25 16:16:11 crc kubenswrapper[4879]: I1125 16:16:11.335720 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.903990 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf"] Nov 25 16:16:12 crc kubenswrapper[4879]: E1125 16:16:12.904933 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="init" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.904950 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="init" Nov 25 16:16:12 crc kubenswrapper[4879]: E1125 16:16:12.904981 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a31b4be8-7620-4231-a877-f2755303c565" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.904992 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a31b4be8-7620-4231-a877-f2755303c565" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 25 16:16:12 crc kubenswrapper[4879]: E1125 16:16:12.905023 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="dnsmasq-dns" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.905031 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="dnsmasq-dns" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.905293 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="943e7dfb-fd1d-4898-b3b1-11c4622f2c71" containerName="dnsmasq-dns" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.905325 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a31b4be8-7620-4231-a877-f2755303c565" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.906097 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.909960 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.910180 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.910340 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.913508 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:16:12 crc kubenswrapper[4879]: I1125 16:16:12.916168 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf"] Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.077079 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.077198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.077254 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.077286 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.077325 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clvs6\" (UniqueName: \"kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.179469 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph\") pod 
\"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.179528 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.179568 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clvs6\" (UniqueName: \"kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.179627 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.179702 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.185959 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.186260 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.186926 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.187214 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: 
\"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.196518 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clvs6\" (UniqueName: \"kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.227336 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:16:13 crc kubenswrapper[4879]: I1125 16:16:13.741307 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf"] Nov 25 16:16:14 crc kubenswrapper[4879]: I1125 16:16:14.368839 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" event={"ID":"2101fdfc-c211-4108-aaab-562995e85279","Type":"ContainerStarted","Data":"541c0be717e881d73bf2b7553c3d90b2599f95535ce47fcf5243648a55c6a23c"} Nov 25 16:16:15 crc kubenswrapper[4879]: I1125 16:16:15.380341 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" event={"ID":"2101fdfc-c211-4108-aaab-562995e85279","Type":"ContainerStarted","Data":"ba524094a0a5239a797a49dfd2d7e4dab4fb034098124fc3d0fc80cbc16dfe70"} Nov 25 16:16:19 crc kubenswrapper[4879]: I1125 16:16:19.653425 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:16:19 crc kubenswrapper[4879]: E1125 16:16:19.654076 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:16:34 crc kubenswrapper[4879]: I1125 16:16:34.646915 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:16:34 crc kubenswrapper[4879]: E1125 16:16:34.648317 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.463363 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" podStartSLOduration=24.034216784 podStartE2EDuration="24.463346209s" podCreationTimestamp="2025-11-25 16:16:12 +0000 UTC" firstStartedPulling="2025-11-25 16:16:13.747444541 +0000 UTC m=+6665.350857612" lastFinishedPulling="2025-11-25 16:16:14.176573966 +0000 UTC m=+6665.779987037" observedRunningTime="2025-11-25 16:16:15.434707993 +0000 UTC m=+6667.038121064" 
watchObservedRunningTime="2025-11-25 16:16:36.463346209 +0000 UTC m=+6688.066759280" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.466205 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.468838 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.478112 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.590286 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6gbb\" (UniqueName: \"kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.590419 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.590468 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.693367 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6gbb\" (UniqueName: \"kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.693601 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.693716 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.694441 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.694473 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.716445 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6gbb\" (UniqueName: \"kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb\") pod \"redhat-operators-7fkqm\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:36 crc kubenswrapper[4879]: I1125 16:16:36.792281 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:37 crc kubenswrapper[4879]: I1125 16:16:37.239686 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:37 crc kubenswrapper[4879]: I1125 16:16:37.614096 4879 generic.go:334] "Generic (PLEG): container finished" podID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerID="ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a" exitCode=0 Nov 25 16:16:37 crc kubenswrapper[4879]: I1125 16:16:37.614498 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerDied","Data":"ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a"} Nov 25 16:16:37 crc kubenswrapper[4879]: I1125 16:16:37.614550 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerStarted","Data":"9f4e784b8147e190cce84964c75b7392475cd01af84739f10df67e3a22297daa"} Nov 25 16:16:38 crc kubenswrapper[4879]: I1125 16:16:38.633409 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerStarted","Data":"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9"} Nov 25 16:16:44 crc kubenswrapper[4879]: I1125 16:16:44.690299 4879 generic.go:334] "Generic (PLEG): container finished" podID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerID="9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9" exitCode=0 Nov 25 16:16:44 crc kubenswrapper[4879]: I1125 16:16:44.690414 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerDied","Data":"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9"} Nov 25 16:16:46 crc kubenswrapper[4879]: I1125 16:16:46.714536 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerStarted","Data":"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a"} Nov 25 16:16:46 crc kubenswrapper[4879]: I1125 16:16:46.735868 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7fkqm" podStartSLOduration=2.649796505 podStartE2EDuration="10.735845428s" podCreationTimestamp="2025-11-25 16:16:36 +0000 UTC" firstStartedPulling="2025-11-25 16:16:37.61792793 +0000 UTC m=+6689.221341001" lastFinishedPulling="2025-11-25 16:16:45.703976853 +0000 UTC m=+6697.307389924" 
observedRunningTime="2025-11-25 16:16:46.731643801 +0000 UTC m=+6698.335056872" watchObservedRunningTime="2025-11-25 16:16:46.735845428 +0000 UTC m=+6698.339258499" Nov 25 16:16:46 crc kubenswrapper[4879]: I1125 16:16:46.793388 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:46 crc kubenswrapper[4879]: I1125 16:16:46.793458 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:47 crc kubenswrapper[4879]: I1125 16:16:47.289701 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-hjvjm"] Nov 25 16:16:47 crc kubenswrapper[4879]: I1125 16:16:47.305821 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-hjvjm"] Nov 25 16:16:47 crc kubenswrapper[4879]: I1125 16:16:47.782351 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e841bc3b-e1b7-4c00-b207-f4f66c225be9" path="/var/lib/kubelet/pods/e841bc3b-e1b7-4c00-b207-f4f66c225be9/volumes" Nov 25 16:16:47 crc kubenswrapper[4879]: I1125 16:16:47.847143 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7fkqm" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="registry-server" probeResult="failure" output=< Nov 25 16:16:47 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:16:47 crc kubenswrapper[4879]: > Nov 25 16:16:48 crc kubenswrapper[4879]: I1125 16:16:48.030027 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-3f0a-account-create-gj9n8"] Nov 25 16:16:48 crc kubenswrapper[4879]: I1125 16:16:48.041007 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-3f0a-account-create-gj9n8"] Nov 25 16:16:49 crc kubenswrapper[4879]: I1125 16:16:49.653809 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:16:49 crc kubenswrapper[4879]: E1125 16:16:49.662280 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:16:49 crc kubenswrapper[4879]: I1125 16:16:49.669054 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="420b294c-2281-48eb-9339-d5fbe28c57b3" path="/var/lib/kubelet/pods/420b294c-2281-48eb-9339-d5fbe28c57b3/volumes" Nov 25 16:16:53 crc kubenswrapper[4879]: I1125 16:16:53.025734 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-8fh2s"] Nov 25 16:16:53 crc kubenswrapper[4879]: I1125 16:16:53.036303 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-8fh2s"] Nov 25 16:16:53 crc kubenswrapper[4879]: I1125 16:16:53.671317 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43b99a4e-a3e4-4129-816c-e139e5806ec1" path="/var/lib/kubelet/pods/43b99a4e-a3e4-4129-816c-e139e5806ec1/volumes" Nov 25 16:16:54 crc kubenswrapper[4879]: I1125 16:16:54.024495 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-c616-account-create-h586d"] Nov 25 
16:16:54 crc kubenswrapper[4879]: I1125 16:16:54.037270 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-c616-account-create-h586d"] Nov 25 16:16:55 crc kubenswrapper[4879]: I1125 16:16:55.659329 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d347fc00-931a-40cf-83ca-07761d0a217c" path="/var/lib/kubelet/pods/d347fc00-931a-40cf-83ca-07761d0a217c/volumes" Nov 25 16:16:55 crc kubenswrapper[4879]: I1125 16:16:55.849145 4879 scope.go:117] "RemoveContainer" containerID="ebf17fa06c11020ad972f12b814444b371a00d03f9ef458bd1d4bdfa5aea08c0" Nov 25 16:16:55 crc kubenswrapper[4879]: I1125 16:16:55.875466 4879 scope.go:117] "RemoveContainer" containerID="6e8684bb91fceb1ed97148d20455dedbb54f41ef47574c3f53c647c80a5b4256" Nov 25 16:16:55 crc kubenswrapper[4879]: I1125 16:16:55.928881 4879 scope.go:117] "RemoveContainer" containerID="5230f5a06017969bd201f7aac98cf3d5009f6f4b3f958e378ab3d41b4d2154c8" Nov 25 16:16:55 crc kubenswrapper[4879]: I1125 16:16:55.990954 4879 scope.go:117] "RemoveContainer" containerID="4e340da432b6a6de1251e61d3036fdee65596bb25210a4c73038b49be4590453" Nov 25 16:16:56 crc kubenswrapper[4879]: I1125 16:16:56.842061 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:56 crc kubenswrapper[4879]: I1125 16:16:56.889675 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:57 crc kubenswrapper[4879]: I1125 16:16:57.656653 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:58 crc kubenswrapper[4879]: I1125 16:16:58.837841 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7fkqm" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="registry-server" containerID="cri-o://06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a" gracePeriod=2 Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.401260 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.481289 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6gbb\" (UniqueName: \"kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb\") pod \"8a39a149-1e29-4b2e-b90a-a39f857874df\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.481370 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities\") pod \"8a39a149-1e29-4b2e-b90a-a39f857874df\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.481440 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content\") pod \"8a39a149-1e29-4b2e-b90a-a39f857874df\" (UID: \"8a39a149-1e29-4b2e-b90a-a39f857874df\") " Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.482294 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities" (OuterVolumeSpecName: "utilities") pod "8a39a149-1e29-4b2e-b90a-a39f857874df" (UID: "8a39a149-1e29-4b2e-b90a-a39f857874df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.495769 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb" (OuterVolumeSpecName: "kube-api-access-c6gbb") pod "8a39a149-1e29-4b2e-b90a-a39f857874df" (UID: "8a39a149-1e29-4b2e-b90a-a39f857874df"). InnerVolumeSpecName "kube-api-access-c6gbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.574240 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a39a149-1e29-4b2e-b90a-a39f857874df" (UID: "8a39a149-1e29-4b2e-b90a-a39f857874df"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.583812 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6gbb\" (UniqueName: \"kubernetes.io/projected/8a39a149-1e29-4b2e-b90a-a39f857874df-kube-api-access-c6gbb\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.583852 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.583862 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a39a149-1e29-4b2e-b90a-a39f857874df-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.849063 4879 generic.go:334] "Generic (PLEG): container finished" podID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerID="06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a" exitCode=0 Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.849109 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerDied","Data":"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a"} Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.849162 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7fkqm" event={"ID":"8a39a149-1e29-4b2e-b90a-a39f857874df","Type":"ContainerDied","Data":"9f4e784b8147e190cce84964c75b7392475cd01af84739f10df67e3a22297daa"} Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.849184 4879 scope.go:117] "RemoveContainer" containerID="06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.849298 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7fkqm" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.874554 4879 scope.go:117] "RemoveContainer" containerID="9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.875604 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.885434 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7fkqm"] Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.895890 4879 scope.go:117] "RemoveContainer" containerID="ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.942073 4879 scope.go:117] "RemoveContainer" containerID="06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a" Nov 25 16:16:59 crc kubenswrapper[4879]: E1125 16:16:59.947036 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a\": container with ID starting with 06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a not found: ID does not exist" containerID="06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.947094 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a"} err="failed to get container status \"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a\": rpc error: code = NotFound desc = could not find container \"06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a\": container with ID starting with 06f6b46311938b2b4dd291a03e1b1b3cf6f1354e86785dd8207bf76796d6c91a not found: ID does not exist" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.947150 4879 scope.go:117] "RemoveContainer" containerID="9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9" Nov 25 16:16:59 crc kubenswrapper[4879]: E1125 16:16:59.947694 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9\": container with ID starting with 9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9 not found: ID does not exist" containerID="9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.947740 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9"} err="failed to get container status \"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9\": rpc error: code = NotFound desc = could not find container \"9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9\": container with ID starting with 9bebe66b70fa3da305af76e9a9dab527ad98b2ff7be3a0c097589dce71dc25b9 not found: ID does not exist" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.947778 4879 scope.go:117] "RemoveContainer" containerID="ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a" Nov 25 16:16:59 crc kubenswrapper[4879]: E1125 16:16:59.948724 4879 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a\": container with ID starting with ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a not found: ID does not exist" containerID="ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a" Nov 25 16:16:59 crc kubenswrapper[4879]: I1125 16:16:59.948753 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a"} err="failed to get container status \"ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a\": rpc error: code = NotFound desc = could not find container \"ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a\": container with ID starting with ee76b800b124e954c850c2ed58c73aa0ce7584583f2d19a367b3a5ac6a35155a not found: ID does not exist" Nov 25 16:17:01 crc kubenswrapper[4879]: I1125 16:17:01.659661 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" path="/var/lib/kubelet/pods/8a39a149-1e29-4b2e-b90a-a39f857874df/volumes" Nov 25 16:17:03 crc kubenswrapper[4879]: I1125 16:17:03.645577 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:17:03 crc kubenswrapper[4879]: E1125 16:17:03.646151 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:17:18 crc kubenswrapper[4879]: I1125 16:17:18.645502 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:17:18 crc kubenswrapper[4879]: E1125 16:17:18.646332 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:17:29 crc kubenswrapper[4879]: I1125 16:17:29.652827 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:17:29 crc kubenswrapper[4879]: E1125 16:17:29.653636 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:17:37 crc kubenswrapper[4879]: I1125 16:17:37.039936 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-r72bb"] Nov 25 16:17:37 crc kubenswrapper[4879]: I1125 16:17:37.051445 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-r72bb"] Nov 25 16:17:37 crc kubenswrapper[4879]: 
I1125 16:17:37.658371 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48e59a94-f34e-428a-8695-2f005b4804c5" path="/var/lib/kubelet/pods/48e59a94-f34e-428a-8695-2f005b4804c5/volumes" Nov 25 16:17:40 crc kubenswrapper[4879]: I1125 16:17:40.645831 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:17:40 crc kubenswrapper[4879]: E1125 16:17:40.646424 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:17:54 crc kubenswrapper[4879]: I1125 16:17:54.644805 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:17:55 crc kubenswrapper[4879]: I1125 16:17:55.358372 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde"} Nov 25 16:17:56 crc kubenswrapper[4879]: I1125 16:17:56.149824 4879 scope.go:117] "RemoveContainer" containerID="764eb2d23aecc1a91db7cce42e187c1a4b08d1104a56887f6c449ca5c4f3c924" Nov 25 16:17:56 crc kubenswrapper[4879]: I1125 16:17:56.199615 4879 scope.go:117] "RemoveContainer" containerID="116ee236e2a0d82331df17a180456d133adb4bc22bb44c1bef5209cb1eb5c672" Nov 25 16:20:17 crc kubenswrapper[4879]: I1125 16:20:17.409211 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:20:17 crc kubenswrapper[4879]: I1125 16:20:17.409731 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:20:22 crc kubenswrapper[4879]: I1125 16:20:22.053860 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-a082-account-create-q8ptq"] Nov 25 16:20:22 crc kubenswrapper[4879]: I1125 16:20:22.064944 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-5w25m"] Nov 25 16:20:22 crc kubenswrapper[4879]: I1125 16:20:22.075320 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-5w25m"] Nov 25 16:20:22 crc kubenswrapper[4879]: I1125 16:20:22.086584 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-a082-account-create-q8ptq"] Nov 25 16:20:23 crc kubenswrapper[4879]: I1125 16:20:23.704437 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a66844e-4198-4176-a3b5-1a0cff251dff" path="/var/lib/kubelet/pods/2a66844e-4198-4176-a3b5-1a0cff251dff/volumes" Nov 25 16:20:23 crc kubenswrapper[4879]: I1125 16:20:23.708913 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4d33f0b6-2083-4926-87bb-ea4c103c19d1" path="/var/lib/kubelet/pods/4d33f0b6-2083-4926-87bb-ea4c103c19d1/volumes" Nov 25 16:20:34 crc kubenswrapper[4879]: I1125 16:20:34.038151 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-s724w"] Nov 25 16:20:34 crc kubenswrapper[4879]: I1125 16:20:34.049950 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-s724w"] Nov 25 16:20:35 crc kubenswrapper[4879]: I1125 16:20:35.658566 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc563b11-2bd4-4f94-8264-c4accf7969c6" path="/var/lib/kubelet/pods/cc563b11-2bd4-4f94-8264-c4accf7969c6/volumes" Nov 25 16:20:47 crc kubenswrapper[4879]: I1125 16:20:47.409146 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:20:47 crc kubenswrapper[4879]: I1125 16:20:47.409680 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:20:56 crc kubenswrapper[4879]: I1125 16:20:56.387912 4879 scope.go:117] "RemoveContainer" containerID="137a3d0d5fd3865902f93b4c1e1baab545cdea3babed3ba115ac17c6202a2698" Nov 25 16:20:56 crc kubenswrapper[4879]: I1125 16:20:56.412901 4879 scope.go:117] "RemoveContainer" containerID="c72ba6ce7c97c2215dea3746dfad87b93ef9329ee84a89fa5e0e5f9c41c73e49" Nov 25 16:20:56 crc kubenswrapper[4879]: I1125 16:20:56.469948 4879 scope.go:117] "RemoveContainer" containerID="232a8549eb72c624baff9fc81e2b0ac7d614977fe6e1113a5fd72696aea08b71" Nov 25 16:21:17 crc kubenswrapper[4879]: I1125 16:21:17.409458 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:21:17 crc kubenswrapper[4879]: I1125 16:21:17.410005 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:21:17 crc kubenswrapper[4879]: I1125 16:21:17.410048 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:21:17 crc kubenswrapper[4879]: I1125 16:21:17.410610 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:21:17 crc kubenswrapper[4879]: I1125 16:21:17.410665 4879 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde" gracePeriod=600 Nov 25 16:21:18 crc kubenswrapper[4879]: I1125 16:21:18.377394 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde" exitCode=0 Nov 25 16:21:18 crc kubenswrapper[4879]: I1125 16:21:18.377924 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde"} Nov 25 16:21:18 crc kubenswrapper[4879]: I1125 16:21:18.377953 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83"} Nov 25 16:21:18 crc kubenswrapper[4879]: I1125 16:21:18.377968 4879 scope.go:117] "RemoveContainer" containerID="a144a9aeb51dbb0727abe3901813c5e04d71ae3646a1a7f2073ea66ed3b46dd4" Nov 25 16:22:47 crc kubenswrapper[4879]: I1125 16:22:47.041439 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-dq494"] Nov 25 16:22:47 crc kubenswrapper[4879]: I1125 16:22:47.051223 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-dq494"] Nov 25 16:22:47 crc kubenswrapper[4879]: I1125 16:22:47.666336 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d2ce32-3fdf-486f-86ce-6dd86a0e272b" path="/var/lib/kubelet/pods/11d2ce32-3fdf-486f-86ce-6dd86a0e272b/volumes" Nov 25 16:22:48 crc kubenswrapper[4879]: I1125 16:22:48.033788 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-5cc7-account-create-w7nlh"] Nov 25 16:22:48 crc kubenswrapper[4879]: I1125 16:22:48.043450 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-5cc7-account-create-w7nlh"] Nov 25 16:22:49 crc kubenswrapper[4879]: I1125 16:22:49.658010 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eabac680-caa0-462c-9f71-ec0a1ae262de" path="/var/lib/kubelet/pods/eabac680-caa0-462c-9f71-ec0a1ae262de/volumes" Nov 25 16:22:56 crc kubenswrapper[4879]: I1125 16:22:56.617995 4879 scope.go:117] "RemoveContainer" containerID="2c19422b7cd06a9c3b5c5da6cdd4f9951a19cd4f96422694ed0688a596b483a3" Nov 25 16:22:56 crc kubenswrapper[4879]: I1125 16:22:56.648762 4879 scope.go:117] "RemoveContainer" containerID="e8a28495706129a48858bfacb91eaa98701fe0518462e9f0c382385113c87af6" Nov 25 16:23:08 crc kubenswrapper[4879]: I1125 16:23:08.029728 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-2jcrb"] Nov 25 16:23:08 crc kubenswrapper[4879]: I1125 16:23:08.039914 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-2jcrb"] Nov 25 16:23:09 crc kubenswrapper[4879]: I1125 16:23:09.658882 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db92ff9-020b-4235-b2bb-74ee897afee5" path="/var/lib/kubelet/pods/2db92ff9-020b-4235-b2bb-74ee897afee5/volumes" Nov 25 16:23:17 crc kubenswrapper[4879]: I1125 16:23:17.409028 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:23:17 crc kubenswrapper[4879]: I1125 16:23:17.409640 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:23:42 crc kubenswrapper[4879]: I1125 16:23:42.049179 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-e675-account-create-r7z7g"] Nov 25 16:23:42 crc kubenswrapper[4879]: I1125 16:23:42.060325 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-f49lf"] Nov 25 16:23:42 crc kubenswrapper[4879]: I1125 16:23:42.071661 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-e675-account-create-r7z7g"] Nov 25 16:23:42 crc kubenswrapper[4879]: I1125 16:23:42.083021 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-f49lf"] Nov 25 16:23:43 crc kubenswrapper[4879]: I1125 16:23:43.658390 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a89a828-7c8f-4ae4-a046-9e81fbb4969d" path="/var/lib/kubelet/pods/0a89a828-7c8f-4ae4-a046-9e81fbb4969d/volumes" Nov 25 16:23:43 crc kubenswrapper[4879]: I1125 16:23:43.661829 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b021d6a6-a7d7-4f27-9f8e-47df75f82eed" path="/var/lib/kubelet/pods/b021d6a6-a7d7-4f27-9f8e-47df75f82eed/volumes" Nov 25 16:23:47 crc kubenswrapper[4879]: I1125 16:23:47.408615 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:23:47 crc kubenswrapper[4879]: I1125 16:23:47.409237 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:23:56 crc kubenswrapper[4879]: I1125 16:23:56.796787 4879 scope.go:117] "RemoveContainer" containerID="a007105d78cfd14b11e8f4ec9ccd9302dd455140290b018c012a3caf49ccb3a2" Nov 25 16:23:56 crc kubenswrapper[4879]: I1125 16:23:56.839677 4879 scope.go:117] "RemoveContainer" containerID="358d30d407d2fe5ffe23ae886d1f16c41e43148584cbffeed10cf1141fa1592c" Nov 25 16:23:56 crc kubenswrapper[4879]: I1125 16:23:56.904797 4879 scope.go:117] "RemoveContainer" containerID="37b1a754bac274e0d8f424bd4d62534521e5d6746bf5fbf4ac6238021ded3805" Nov 25 16:24:05 crc kubenswrapper[4879]: I1125 16:24:05.040362 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-6q5vt"] Nov 25 16:24:05 crc kubenswrapper[4879]: I1125 16:24:05.050216 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-6q5vt"] Nov 25 16:24:05 crc kubenswrapper[4879]: I1125 16:24:05.670789 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc9955de-2773-439c-8ae3-0250704dbd65" 
path="/var/lib/kubelet/pods/fc9955de-2773-439c-8ae3-0250704dbd65/volumes" Nov 25 16:24:17 crc kubenswrapper[4879]: I1125 16:24:17.408846 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:24:17 crc kubenswrapper[4879]: I1125 16:24:17.409404 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:24:17 crc kubenswrapper[4879]: I1125 16:24:17.409458 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:24:17 crc kubenswrapper[4879]: I1125 16:24:17.410319 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:24:17 crc kubenswrapper[4879]: I1125 16:24:17.410372 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" gracePeriod=600 Nov 25 16:24:17 crc kubenswrapper[4879]: E1125 16:24:17.547387 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:24:18 crc kubenswrapper[4879]: I1125 16:24:18.153956 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" exitCode=0 Nov 25 16:24:18 crc kubenswrapper[4879]: I1125 16:24:18.153999 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83"} Nov 25 16:24:18 crc kubenswrapper[4879]: I1125 16:24:18.154041 4879 scope.go:117] "RemoveContainer" containerID="0849b9e419300bb4f5ce41bd03728d5d705289e312f8bcc42e36492712bebfde" Nov 25 16:24:18 crc kubenswrapper[4879]: I1125 16:24:18.154747 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:24:18 crc kubenswrapper[4879]: E1125 16:24:18.155074 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:24:30 crc kubenswrapper[4879]: I1125 16:24:30.645642 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:24:30 crc kubenswrapper[4879]: E1125 16:24:30.646470 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:24:44 crc kubenswrapper[4879]: I1125 16:24:44.644634 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:24:44 crc kubenswrapper[4879]: E1125 16:24:44.645730 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:24:55 crc kubenswrapper[4879]: I1125 16:24:55.645238 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:24:55 crc kubenswrapper[4879]: E1125 16:24:55.645986 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:24:57 crc kubenswrapper[4879]: I1125 16:24:57.061408 4879 scope.go:117] "RemoveContainer" containerID="d3af482652567946c1d31642af7b04f54151e67dc1b90343e45eaeebaa4091c5" Nov 25 16:25:08 crc kubenswrapper[4879]: I1125 16:25:08.645219 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:25:08 crc kubenswrapper[4879]: E1125 16:25:08.645916 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:25:22 crc kubenswrapper[4879]: I1125 16:25:22.645106 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:25:22 crc kubenswrapper[4879]: E1125 16:25:22.645885 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:25:33 crc kubenswrapper[4879]: I1125 16:25:33.645328 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:25:33 crc kubenswrapper[4879]: E1125 16:25:33.645982 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.146055 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:35 crc kubenswrapper[4879]: E1125 16:25:35.147012 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="registry-server" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.147024 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="registry-server" Nov 25 16:25:35 crc kubenswrapper[4879]: E1125 16:25:35.147044 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="extract-content" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.147050 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="extract-content" Nov 25 16:25:35 crc kubenswrapper[4879]: E1125 16:25:35.147099 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="extract-utilities" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.147105 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="extract-utilities" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.147331 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a39a149-1e29-4b2e-b90a-a39f857874df" containerName="registry-server" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.148980 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.160604 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.344285 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.345188 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.345288 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnkbt\" (UniqueName: \"kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.447293 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.447466 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.447563 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnkbt\" (UniqueName: \"kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.447817 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.447964 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.473688 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dnkbt\" (UniqueName: \"kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt\") pod \"redhat-marketplace-dcwr9\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:35 crc kubenswrapper[4879]: I1125 16:25:35.769174 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:36 crc kubenswrapper[4879]: I1125 16:25:36.219220 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:37 crc kubenswrapper[4879]: I1125 16:25:37.045666 4879 generic.go:334] "Generic (PLEG): container finished" podID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerID="057479f5fd25a2bc8a76c01476368c7f250a3470247698b61a076cce483896a4" exitCode=0 Nov 25 16:25:37 crc kubenswrapper[4879]: I1125 16:25:37.045720 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerDied","Data":"057479f5fd25a2bc8a76c01476368c7f250a3470247698b61a076cce483896a4"} Nov 25 16:25:37 crc kubenswrapper[4879]: I1125 16:25:37.045946 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerStarted","Data":"48e61a628ca158451d88dca75c46dc915319066733c50e9ba436a66e35464216"} Nov 25 16:25:37 crc kubenswrapper[4879]: I1125 16:25:37.048363 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:25:39 crc kubenswrapper[4879]: I1125 16:25:39.067923 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerStarted","Data":"55368e767a8ee1e2771522f3cbe22ea53c68d994db3379bf2efa731534db083e"} Nov 25 16:25:41 crc kubenswrapper[4879]: I1125 16:25:41.089332 4879 generic.go:334] "Generic (PLEG): container finished" podID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerID="55368e767a8ee1e2771522f3cbe22ea53c68d994db3379bf2efa731534db083e" exitCode=0 Nov 25 16:25:41 crc kubenswrapper[4879]: I1125 16:25:41.089797 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerDied","Data":"55368e767a8ee1e2771522f3cbe22ea53c68d994db3379bf2efa731534db083e"} Nov 25 16:25:43 crc kubenswrapper[4879]: I1125 16:25:43.112296 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerStarted","Data":"a41396185d570bd664d0c909b5307f851d15e0f394008aba65013e76185ae47e"} Nov 25 16:25:43 crc kubenswrapper[4879]: I1125 16:25:43.150400 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dcwr9" podStartSLOduration=2.797339975 podStartE2EDuration="8.150381429s" podCreationTimestamp="2025-11-25 16:25:35 +0000 UTC" firstStartedPulling="2025-11-25 16:25:37.048023304 +0000 UTC m=+7228.651436375" lastFinishedPulling="2025-11-25 16:25:42.401064758 +0000 UTC m=+7234.004477829" observedRunningTime="2025-11-25 16:25:43.13632354 +0000 UTC m=+7234.739736611" watchObservedRunningTime="2025-11-25 16:25:43.150381429 +0000 UTC 
m=+7234.753794500" Nov 25 16:25:45 crc kubenswrapper[4879]: I1125 16:25:45.769948 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:45 crc kubenswrapper[4879]: I1125 16:25:45.770473 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:45 crc kubenswrapper[4879]: I1125 16:25:45.824149 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:47 crc kubenswrapper[4879]: I1125 16:25:47.646666 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:25:47 crc kubenswrapper[4879]: E1125 16:25:47.647270 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.775473 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.779020 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.803934 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.902030 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.902381 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9p9n\" (UniqueName: \"kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:51 crc kubenswrapper[4879]: I1125 16:25:51.902432 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.004503 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9p9n\" (UniqueName: \"kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.004588 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.004778 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.005098 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.005200 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.028888 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9p9n\" (UniqueName: \"kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n\") pod \"community-operators-vk52q\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.140637 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:25:52 crc kubenswrapper[4879]: I1125 16:25:52.675393 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:25:53 crc kubenswrapper[4879]: I1125 16:25:53.211197 4879 generic.go:334] "Generic (PLEG): container finished" podID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerID="20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303" exitCode=0 Nov 25 16:25:53 crc kubenswrapper[4879]: I1125 16:25:53.211328 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerDied","Data":"20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303"} Nov 25 16:25:53 crc kubenswrapper[4879]: I1125 16:25:53.211501 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerStarted","Data":"91cb7a17614d641a59e82ab242ffffb274a79e637e9986efd8c40c064bc1cafd"} Nov 25 16:25:55 crc kubenswrapper[4879]: I1125 16:25:55.230978 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerStarted","Data":"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f"} Nov 25 16:25:55 crc kubenswrapper[4879]: I1125 16:25:55.822451 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:57 crc kubenswrapper[4879]: I1125 16:25:57.145308 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:57 crc kubenswrapper[4879]: I1125 16:25:57.145891 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dcwr9" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="registry-server" containerID="cri-o://a41396185d570bd664d0c909b5307f851d15e0f394008aba65013e76185ae47e" gracePeriod=2 Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.277703 4879 generic.go:334] "Generic (PLEG): container finished" podID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerID="a41396185d570bd664d0c909b5307f851d15e0f394008aba65013e76185ae47e" exitCode=0 Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.277822 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerDied","Data":"a41396185d570bd664d0c909b5307f851d15e0f394008aba65013e76185ae47e"} Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.473720 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.650161 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content\") pod \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.650579 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnkbt\" (UniqueName: \"kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt\") pod \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.650659 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities\") pod \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\" (UID: \"bd2e3496-bf43-4389-ae44-13b2295e6f0c\") " Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.651351 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities" (OuterVolumeSpecName: "utilities") pod "bd2e3496-bf43-4389-ae44-13b2295e6f0c" (UID: "bd2e3496-bf43-4389-ae44-13b2295e6f0c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.669488 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd2e3496-bf43-4389-ae44-13b2295e6f0c" (UID: "bd2e3496-bf43-4389-ae44-13b2295e6f0c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.677903 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt" (OuterVolumeSpecName: "kube-api-access-dnkbt") pod "bd2e3496-bf43-4389-ae44-13b2295e6f0c" (UID: "bd2e3496-bf43-4389-ae44-13b2295e6f0c"). InnerVolumeSpecName "kube-api-access-dnkbt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.753307 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.753357 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd2e3496-bf43-4389-ae44-13b2295e6f0c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:25:58 crc kubenswrapper[4879]: I1125 16:25:58.753373 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnkbt\" (UniqueName: \"kubernetes.io/projected/bd2e3496-bf43-4389-ae44-13b2295e6f0c-kube-api-access-dnkbt\") on node \"crc\" DevicePath \"\"" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.292869 4879 generic.go:334] "Generic (PLEG): container finished" podID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerID="b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f" exitCode=0 Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.292943 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerDied","Data":"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f"} Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.299393 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dcwr9" event={"ID":"bd2e3496-bf43-4389-ae44-13b2295e6f0c","Type":"ContainerDied","Data":"48e61a628ca158451d88dca75c46dc915319066733c50e9ba436a66e35464216"} Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.299447 4879 scope.go:117] "RemoveContainer" containerID="a41396185d570bd664d0c909b5307f851d15e0f394008aba65013e76185ae47e" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.299624 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dcwr9" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.332680 4879 scope.go:117] "RemoveContainer" containerID="55368e767a8ee1e2771522f3cbe22ea53c68d994db3379bf2efa731534db083e" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.347521 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.359914 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dcwr9"] Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.361447 4879 scope.go:117] "RemoveContainer" containerID="057479f5fd25a2bc8a76c01476368c7f250a3470247698b61a076cce483896a4" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.652064 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:25:59 crc kubenswrapper[4879]: E1125 16:25:59.652369 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:25:59 crc kubenswrapper[4879]: I1125 16:25:59.660495 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" path="/var/lib/kubelet/pods/bd2e3496-bf43-4389-ae44-13b2295e6f0c/volumes" Nov 25 16:26:00 crc kubenswrapper[4879]: I1125 16:26:00.314685 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerStarted","Data":"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3"} Nov 25 16:26:00 crc kubenswrapper[4879]: I1125 16:26:00.341595 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vk52q" podStartSLOduration=2.760848627 podStartE2EDuration="9.341569551s" podCreationTimestamp="2025-11-25 16:25:51 +0000 UTC" firstStartedPulling="2025-11-25 16:25:53.213141637 +0000 UTC m=+7244.816554708" lastFinishedPulling="2025-11-25 16:25:59.793862561 +0000 UTC m=+7251.397275632" observedRunningTime="2025-11-25 16:26:00.335402756 +0000 UTC m=+7251.938815837" watchObservedRunningTime="2025-11-25 16:26:00.341569551 +0000 UTC m=+7251.944982622" Nov 25 16:26:02 crc kubenswrapper[4879]: I1125 16:26:02.141398 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:02 crc kubenswrapper[4879]: I1125 16:26:02.141721 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:02 crc kubenswrapper[4879]: I1125 16:26:02.187461 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:11 crc kubenswrapper[4879]: I1125 16:26:11.645049 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:26:11 crc kubenswrapper[4879]: E1125 16:26:11.646033 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:26:12 crc kubenswrapper[4879]: I1125 16:26:12.189258 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:12 crc kubenswrapper[4879]: I1125 16:26:12.237328 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:26:12 crc kubenswrapper[4879]: I1125 16:26:12.429289 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vk52q" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="registry-server" containerID="cri-o://edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3" gracePeriod=2 Nov 25 16:26:12 crc kubenswrapper[4879]: I1125 16:26:12.954659 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.062208 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content\") pod \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.062411 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9p9n\" (UniqueName: \"kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n\") pod \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.062479 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities\") pod \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\" (UID: \"cb2c630b-9faf-4050-b419-63a09b6b6cc8\") " Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.063267 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities" (OuterVolumeSpecName: "utilities") pod "cb2c630b-9faf-4050-b419-63a09b6b6cc8" (UID: "cb2c630b-9faf-4050-b419-63a09b6b6cc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.069198 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n" (OuterVolumeSpecName: "kube-api-access-n9p9n") pod "cb2c630b-9faf-4050-b419-63a09b6b6cc8" (UID: "cb2c630b-9faf-4050-b419-63a09b6b6cc8"). InnerVolumeSpecName "kube-api-access-n9p9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.110383 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb2c630b-9faf-4050-b419-63a09b6b6cc8" (UID: "cb2c630b-9faf-4050-b419-63a09b6b6cc8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.165572 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9p9n\" (UniqueName: \"kubernetes.io/projected/cb2c630b-9faf-4050-b419-63a09b6b6cc8-kube-api-access-n9p9n\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.165826 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.165847 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb2c630b-9faf-4050-b419-63a09b6b6cc8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.441307 4879 generic.go:334] "Generic (PLEG): container finished" podID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerID="edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3" exitCode=0 Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.441392 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerDied","Data":"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3"} Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.441628 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vk52q" event={"ID":"cb2c630b-9faf-4050-b419-63a09b6b6cc8","Type":"ContainerDied","Data":"91cb7a17614d641a59e82ab242ffffb274a79e637e9986efd8c40c064bc1cafd"} Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.441659 4879 scope.go:117] "RemoveContainer" containerID="edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.441436 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vk52q" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.466271 4879 scope.go:117] "RemoveContainer" containerID="b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.475255 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.485351 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vk52q"] Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.496365 4879 scope.go:117] "RemoveContainer" containerID="20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.542798 4879 scope.go:117] "RemoveContainer" containerID="edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3" Nov 25 16:26:13 crc kubenswrapper[4879]: E1125 16:26:13.543296 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3\": container with ID starting with edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3 not found: ID does not exist" containerID="edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.543335 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3"} err="failed to get container status \"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3\": rpc error: code = NotFound desc = could not find container \"edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3\": container with ID starting with edf575903e71836e1c0741152bc12898136ecdaa4fe1887ac157020db0a623a3 not found: ID does not exist" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.543361 4879 scope.go:117] "RemoveContainer" containerID="b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f" Nov 25 16:26:13 crc kubenswrapper[4879]: E1125 16:26:13.543801 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f\": container with ID starting with b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f not found: ID does not exist" containerID="b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.543825 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f"} err="failed to get container status \"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f\": rpc error: code = NotFound desc = could not find container \"b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f\": container with ID starting with b4366c9ee3d72cfd60815b38b3d35c19fde729698f4a584072c421197ce85f6f not found: ID does not exist" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.543838 4879 scope.go:117] "RemoveContainer" containerID="20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303" Nov 25 16:26:13 crc kubenswrapper[4879]: E1125 16:26:13.544097 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303\": container with ID starting with 20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303 not found: ID does not exist" containerID="20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.544154 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303"} err="failed to get container status \"20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303\": rpc error: code = NotFound desc = could not find container \"20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303\": container with ID starting with 20ccd5cef398819a73dacb0a1437e5b1e45834d0cf64c5be39a071f2bdfee303 not found: ID does not exist" Nov 25 16:26:13 crc kubenswrapper[4879]: I1125 16:26:13.658169 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" path="/var/lib/kubelet/pods/cb2c630b-9faf-4050-b419-63a09b6b6cc8/volumes" Nov 25 16:26:24 crc kubenswrapper[4879]: I1125 16:26:24.645238 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:26:24 crc kubenswrapper[4879]: E1125 16:26:24.645940 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:26:36 crc kubenswrapper[4879]: I1125 16:26:36.644915 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:26:36 crc kubenswrapper[4879]: E1125 16:26:36.645825 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:26:41 crc kubenswrapper[4879]: I1125 16:26:41.162169 4879 generic.go:334] "Generic (PLEG): container finished" podID="2101fdfc-c211-4108-aaab-562995e85279" containerID="ba524094a0a5239a797a49dfd2d7e4dab4fb034098124fc3d0fc80cbc16dfe70" exitCode=0 Nov 25 16:26:41 crc kubenswrapper[4879]: I1125 16:26:41.162234 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" event={"ID":"2101fdfc-c211-4108-aaab-562995e85279","Type":"ContainerDied","Data":"ba524094a0a5239a797a49dfd2d7e4dab4fb034098124fc3d0fc80cbc16dfe70"} Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.649808 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.762783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory\") pod \"2101fdfc-c211-4108-aaab-562995e85279\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.762882 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key\") pod \"2101fdfc-c211-4108-aaab-562995e85279\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.762909 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph\") pod \"2101fdfc-c211-4108-aaab-562995e85279\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.763083 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle\") pod \"2101fdfc-c211-4108-aaab-562995e85279\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.763256 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clvs6\" (UniqueName: \"kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6\") pod \"2101fdfc-c211-4108-aaab-562995e85279\" (UID: \"2101fdfc-c211-4108-aaab-562995e85279\") " Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.777476 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph" (OuterVolumeSpecName: "ceph") pod "2101fdfc-c211-4108-aaab-562995e85279" (UID: "2101fdfc-c211-4108-aaab-562995e85279"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.778004 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6" (OuterVolumeSpecName: "kube-api-access-clvs6") pod "2101fdfc-c211-4108-aaab-562995e85279" (UID: "2101fdfc-c211-4108-aaab-562995e85279"). InnerVolumeSpecName "kube-api-access-clvs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.782705 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "2101fdfc-c211-4108-aaab-562995e85279" (UID: "2101fdfc-c211-4108-aaab-562995e85279"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.809970 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2101fdfc-c211-4108-aaab-562995e85279" (UID: "2101fdfc-c211-4108-aaab-562995e85279"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.814786 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory" (OuterVolumeSpecName: "inventory") pod "2101fdfc-c211-4108-aaab-562995e85279" (UID: "2101fdfc-c211-4108-aaab-562995e85279"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.868738 4879 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.868785 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clvs6\" (UniqueName: \"kubernetes.io/projected/2101fdfc-c211-4108-aaab-562995e85279-kube-api-access-clvs6\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.868801 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.868814 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:42 crc kubenswrapper[4879]: I1125 16:26:42.868826 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2101fdfc-c211-4108-aaab-562995e85279-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:26:43 crc kubenswrapper[4879]: I1125 16:26:43.184432 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" event={"ID":"2101fdfc-c211-4108-aaab-562995e85279","Type":"ContainerDied","Data":"541c0be717e881d73bf2b7553c3d90b2599f95535ce47fcf5243648a55c6a23c"} Nov 25 16:26:43 crc kubenswrapper[4879]: I1125 16:26:43.184507 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="541c0be717e881d73bf2b7553c3d90b2599f95535ce47fcf5243648a55c6a23c" Nov 25 16:26:43 crc kubenswrapper[4879]: I1125 16:26:43.184553 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf" Nov 25 16:26:49 crc kubenswrapper[4879]: I1125 16:26:49.651419 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:26:49 crc kubenswrapper[4879]: E1125 16:26:49.652252 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.408995 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-fcw6q"] Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409648 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2101fdfc-c211-4108-aaab-562995e85279" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409677 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2101fdfc-c211-4108-aaab-562995e85279" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409708 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="extract-content" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409717 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="extract-content" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409729 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="extract-utilities" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409737 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="extract-utilities" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409756 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="extract-utilities" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409778 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="extract-utilities" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409801 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409810 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409830 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="extract-content" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409837 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="extract-content" Nov 25 16:26:50 crc kubenswrapper[4879]: E1125 16:26:50.409852 4879 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.409858 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.410075 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb2c630b-9faf-4050-b419-63a09b6b6cc8" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.410094 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2e3496-bf43-4389-ae44-13b2295e6f0c" containerName="registry-server" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.410106 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2101fdfc-c211-4108-aaab-562995e85279" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.410968 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.413440 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.413537 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.414100 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.414344 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.419654 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-fcw6q"] Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.543091 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.543222 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.543335 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.543561 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key\") pod 
\"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.543700 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdkt8\" (UniqueName: \"kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.645825 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.646586 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.646971 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.647051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdkt8\" (UniqueName: \"kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.648203 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.651776 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.652071 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.652174 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.655707 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.665697 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdkt8\" (UniqueName: \"kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8\") pod \"bootstrap-openstack-openstack-cell1-fcw6q\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:50 crc kubenswrapper[4879]: I1125 16:26:50.736884 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:26:51 crc kubenswrapper[4879]: I1125 16:26:51.307684 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-fcw6q"] Nov 25 16:26:52 crc kubenswrapper[4879]: I1125 16:26:52.279684 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" event={"ID":"524593c5-f8a9-41e4-99b0-2ef4f69b37b3","Type":"ContainerStarted","Data":"b3dd0603e76d25e06cbb5f1c3f81b63d69295e68f52d35813950c3747848650a"} Nov 25 16:26:52 crc kubenswrapper[4879]: I1125 16:26:52.280275 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" event={"ID":"524593c5-f8a9-41e4-99b0-2ef4f69b37b3","Type":"ContainerStarted","Data":"46529aaa1e4bd0b06550d212c6ddecde796938722555b3045f6df6f2a1156140"} Nov 25 16:26:52 crc kubenswrapper[4879]: I1125 16:26:52.299173 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" podStartSLOduration=1.681525932 podStartE2EDuration="2.299144699s" podCreationTimestamp="2025-11-25 16:26:50 +0000 UTC" firstStartedPulling="2025-11-25 16:26:51.3204143 +0000 UTC m=+7302.923827371" lastFinishedPulling="2025-11-25 16:26:51.938033067 +0000 UTC m=+7303.541446138" observedRunningTime="2025-11-25 16:26:52.296845347 +0000 UTC m=+7303.900258428" watchObservedRunningTime="2025-11-25 16:26:52.299144699 +0000 UTC m=+7303.902557780" Nov 25 16:27:03 crc kubenswrapper[4879]: I1125 16:27:03.645002 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:27:03 crc kubenswrapper[4879]: E1125 16:27:03.645858 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.861358 4879 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.864867 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.876622 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.971073 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.971212 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dntgt\" (UniqueName: \"kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:14 crc kubenswrapper[4879]: I1125 16:27:14.971334 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.073647 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.073774 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dntgt\" (UniqueName: \"kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.073888 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.074284 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.074558 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities\") pod \"redhat-operators-2jqhg\" (UID: 
\"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.107827 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dntgt\" (UniqueName: \"kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt\") pod \"redhat-operators-2jqhg\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.193982 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.644771 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:27:15 crc kubenswrapper[4879]: E1125 16:27:15.645450 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:27:15 crc kubenswrapper[4879]: W1125 16:27:15.697034 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fb5d84c_2f45_4a62_9e0f_2daa042aad0d.slice/crio-0c661a4b3b38806900383e7885e6f11bb0275c05ae414ccae96be1d58f8947b7 WatchSource:0}: Error finding container 0c661a4b3b38806900383e7885e6f11bb0275c05ae414ccae96be1d58f8947b7: Status 404 returned error can't find the container with id 0c661a4b3b38806900383e7885e6f11bb0275c05ae414ccae96be1d58f8947b7 Nov 25 16:27:15 crc kubenswrapper[4879]: I1125 16:27:15.697574 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:16 crc kubenswrapper[4879]: I1125 16:27:16.727108 4879 generic.go:334] "Generic (PLEG): container finished" podID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerID="e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f" exitCode=0 Nov 25 16:27:16 crc kubenswrapper[4879]: I1125 16:27:16.727456 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerDied","Data":"e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f"} Nov 25 16:27:16 crc kubenswrapper[4879]: I1125 16:27:16.727512 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerStarted","Data":"0c661a4b3b38806900383e7885e6f11bb0275c05ae414ccae96be1d58f8947b7"} Nov 25 16:27:17 crc kubenswrapper[4879]: I1125 16:27:17.741938 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerStarted","Data":"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab"} Nov 25 16:27:22 crc kubenswrapper[4879]: I1125 16:27:22.794504 4879 generic.go:334] "Generic (PLEG): container finished" podID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerID="c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab" exitCode=0 Nov 25 16:27:22 crc 
kubenswrapper[4879]: I1125 16:27:22.794567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerDied","Data":"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab"} Nov 25 16:27:23 crc kubenswrapper[4879]: I1125 16:27:23.826040 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerStarted","Data":"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5"} Nov 25 16:27:23 crc kubenswrapper[4879]: I1125 16:27:23.865777 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2jqhg" podStartSLOduration=3.230858104 podStartE2EDuration="9.865756927s" podCreationTimestamp="2025-11-25 16:27:14 +0000 UTC" firstStartedPulling="2025-11-25 16:27:16.733187737 +0000 UTC m=+7328.336600808" lastFinishedPulling="2025-11-25 16:27:23.36808655 +0000 UTC m=+7334.971499631" observedRunningTime="2025-11-25 16:27:23.855585254 +0000 UTC m=+7335.458998335" watchObservedRunningTime="2025-11-25 16:27:23.865756927 +0000 UTC m=+7335.469169998" Nov 25 16:27:25 crc kubenswrapper[4879]: I1125 16:27:25.194411 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:25 crc kubenswrapper[4879]: I1125 16:27:25.194470 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:26 crc kubenswrapper[4879]: I1125 16:27:26.244149 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2jqhg" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="registry-server" probeResult="failure" output=< Nov 25 16:27:26 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:27:26 crc kubenswrapper[4879]: > Nov 25 16:27:26 crc kubenswrapper[4879]: I1125 16:27:26.645474 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:27:26 crc kubenswrapper[4879]: E1125 16:27:26.645886 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:27:35 crc kubenswrapper[4879]: I1125 16:27:35.242202 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:35 crc kubenswrapper[4879]: I1125 16:27:35.304363 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:35 crc kubenswrapper[4879]: I1125 16:27:35.482316 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:36 crc kubenswrapper[4879]: I1125 16:27:36.956819 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2jqhg" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="registry-server" 
containerID="cri-o://97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5" gracePeriod=2 Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.483885 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.620222 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content\") pod \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.620479 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities\") pod \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.620508 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dntgt\" (UniqueName: \"kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt\") pod \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\" (UID: \"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d\") " Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.622024 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities" (OuterVolumeSpecName: "utilities") pod "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" (UID: "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.627968 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt" (OuterVolumeSpecName: "kube-api-access-dntgt") pod "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" (UID: "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d"). InnerVolumeSpecName "kube-api-access-dntgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.719959 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" (UID: "3fb5d84c-2f45-4a62-9e0f-2daa042aad0d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.725252 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.725324 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dntgt\" (UniqueName: \"kubernetes.io/projected/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-kube-api-access-dntgt\") on node \"crc\" DevicePath \"\"" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.725336 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.978352 4879 generic.go:334] "Generic (PLEG): container finished" podID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerID="97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5" exitCode=0 Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.978413 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerDied","Data":"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5"} Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.978443 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2jqhg" event={"ID":"3fb5d84c-2f45-4a62-9e0f-2daa042aad0d","Type":"ContainerDied","Data":"0c661a4b3b38806900383e7885e6f11bb0275c05ae414ccae96be1d58f8947b7"} Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.978477 4879 scope.go:117] "RemoveContainer" containerID="97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5" Nov 25 16:27:37 crc kubenswrapper[4879]: I1125 16:27:37.978752 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2jqhg" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.006144 4879 scope.go:117] "RemoveContainer" containerID="c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.025411 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.038619 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2jqhg"] Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.063090 4879 scope.go:117] "RemoveContainer" containerID="e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.103790 4879 scope.go:117] "RemoveContainer" containerID="97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5" Nov 25 16:27:38 crc kubenswrapper[4879]: E1125 16:27:38.104316 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5\": container with ID starting with 97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5 not found: ID does not exist" containerID="97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.104362 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5"} err="failed to get container status \"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5\": rpc error: code = NotFound desc = could not find container \"97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5\": container with ID starting with 97ea6a54e1b5c2e4ed14d275ec8df376c4a04a6f0b4eaaed02de1ffc8aaf56f5 not found: ID does not exist" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.104391 4879 scope.go:117] "RemoveContainer" containerID="c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab" Nov 25 16:27:38 crc kubenswrapper[4879]: E1125 16:27:38.104715 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab\": container with ID starting with c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab not found: ID does not exist" containerID="c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.104757 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab"} err="failed to get container status \"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab\": rpc error: code = NotFound desc = could not find container \"c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab\": container with ID starting with c03922e12313fa55a0fe747624493f3b32e9441b2cf648218c1a5973591866ab not found: ID does not exist" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.104783 4879 scope.go:117] "RemoveContainer" containerID="e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f" Nov 25 16:27:38 crc kubenswrapper[4879]: E1125 16:27:38.105011 4879 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f\": container with ID starting with e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f not found: ID does not exist" containerID="e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.105035 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f"} err="failed to get container status \"e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f\": rpc error: code = NotFound desc = could not find container \"e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f\": container with ID starting with e15165ef33536702e587760108bd2849c972ceb6349b839a6656aa2a28e0d87f not found: ID does not exist" Nov 25 16:27:38 crc kubenswrapper[4879]: I1125 16:27:38.645275 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:27:38 crc kubenswrapper[4879]: E1125 16:27:38.645656 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:27:39 crc kubenswrapper[4879]: I1125 16:27:39.659137 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" path="/var/lib/kubelet/pods/3fb5d84c-2f45-4a62-9e0f-2daa042aad0d/volumes" Nov 25 16:27:53 crc kubenswrapper[4879]: I1125 16:27:53.644763 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:27:53 crc kubenswrapper[4879]: E1125 16:27:53.645709 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:28:06 crc kubenswrapper[4879]: I1125 16:28:06.645851 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:28:06 crc kubenswrapper[4879]: E1125 16:28:06.646727 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:28:20 crc kubenswrapper[4879]: I1125 16:28:20.645018 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:28:20 crc kubenswrapper[4879]: E1125 16:28:20.645931 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:28:32 crc kubenswrapper[4879]: I1125 16:28:32.644845 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:28:32 crc kubenswrapper[4879]: E1125 16:28:32.645824 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:28:44 crc kubenswrapper[4879]: I1125 16:28:44.645054 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:28:44 crc kubenswrapper[4879]: E1125 16:28:44.645897 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:28:59 crc kubenswrapper[4879]: I1125 16:28:59.651911 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:28:59 crc kubenswrapper[4879]: E1125 16:28:59.652850 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:29:12 crc kubenswrapper[4879]: I1125 16:29:12.644603 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:29:12 crc kubenswrapper[4879]: E1125 16:29:12.645394 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:29:25 crc kubenswrapper[4879]: I1125 16:29:25.644876 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:29:26 crc kubenswrapper[4879]: I1125 16:29:26.080514 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7"} Nov 25 
16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.418634 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:38 crc kubenswrapper[4879]: E1125 16:29:38.420109 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="registry-server" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.420145 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="registry-server" Nov 25 16:29:38 crc kubenswrapper[4879]: E1125 16:29:38.420177 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="extract-utilities" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.420185 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="extract-utilities" Nov 25 16:29:38 crc kubenswrapper[4879]: E1125 16:29:38.420230 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="extract-content" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.420239 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="extract-content" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.420533 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fb5d84c-2f45-4a62-9e0f-2daa042aad0d" containerName="registry-server" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.422732 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.445961 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.468841 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.468994 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.469053 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxj8m\" (UniqueName: \"kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.570888 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxj8m\" (UniqueName: \"kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " 
pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.571165 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.571303 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.571953 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.572472 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.594616 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxj8m\" (UniqueName: \"kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m\") pod \"certified-operators-4cs5b\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:38 crc kubenswrapper[4879]: I1125 16:29:38.755369 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:39 crc kubenswrapper[4879]: W1125 16:29:39.324187 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4336fbe0_9dc7_446d_b2c3_d09e19df9209.slice/crio-b789a42540f2fb0d3d465d7e192d142eeb40ddeab512057a14f01bde6df07f5d WatchSource:0}: Error finding container b789a42540f2fb0d3d465d7e192d142eeb40ddeab512057a14f01bde6df07f5d: Status 404 returned error can't find the container with id b789a42540f2fb0d3d465d7e192d142eeb40ddeab512057a14f01bde6df07f5d Nov 25 16:29:39 crc kubenswrapper[4879]: I1125 16:29:39.330973 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:40 crc kubenswrapper[4879]: I1125 16:29:40.231672 4879 generic.go:334] "Generic (PLEG): container finished" podID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerID="b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180" exitCode=0 Nov 25 16:29:40 crc kubenswrapper[4879]: I1125 16:29:40.231776 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerDied","Data":"b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180"} Nov 25 16:29:40 crc kubenswrapper[4879]: I1125 16:29:40.231985 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerStarted","Data":"b789a42540f2fb0d3d465d7e192d142eeb40ddeab512057a14f01bde6df07f5d"} Nov 25 16:29:41 crc kubenswrapper[4879]: I1125 16:29:41.246196 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerStarted","Data":"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403"} Nov 25 16:29:43 crc kubenswrapper[4879]: I1125 16:29:43.268019 4879 generic.go:334] "Generic (PLEG): container finished" podID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerID="bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403" exitCode=0 Nov 25 16:29:43 crc kubenswrapper[4879]: I1125 16:29:43.268090 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerDied","Data":"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403"} Nov 25 16:29:44 crc kubenswrapper[4879]: I1125 16:29:44.284830 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerStarted","Data":"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502"} Nov 25 16:29:44 crc kubenswrapper[4879]: I1125 16:29:44.312785 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4cs5b" podStartSLOduration=2.872724148 podStartE2EDuration="6.31276511s" podCreationTimestamp="2025-11-25 16:29:38 +0000 UTC" firstStartedPulling="2025-11-25 16:29:40.23421845 +0000 UTC m=+7471.837631521" lastFinishedPulling="2025-11-25 16:29:43.674259412 +0000 UTC m=+7475.277672483" observedRunningTime="2025-11-25 16:29:44.306934113 +0000 UTC m=+7475.910347194" watchObservedRunningTime="2025-11-25 16:29:44.31276511 +0000 UTC m=+7475.916178181" Nov 
25 16:29:48 crc kubenswrapper[4879]: I1125 16:29:48.756348 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:48 crc kubenswrapper[4879]: I1125 16:29:48.756967 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:48 crc kubenswrapper[4879]: I1125 16:29:48.809687 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:49 crc kubenswrapper[4879]: I1125 16:29:49.393332 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:49 crc kubenswrapper[4879]: I1125 16:29:49.447772 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.361006 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4cs5b" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="registry-server" containerID="cri-o://cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502" gracePeriod=2 Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.879836 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.919302 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities\") pod \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.919546 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") pod \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.919711 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxj8m\" (UniqueName: \"kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m\") pod \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.920286 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities" (OuterVolumeSpecName: "utilities") pod "4336fbe0-9dc7-446d-b2c3-d09e19df9209" (UID: "4336fbe0-9dc7-446d-b2c3-d09e19df9209"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.920561 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:29:51 crc kubenswrapper[4879]: I1125 16:29:51.927094 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m" (OuterVolumeSpecName: "kube-api-access-cxj8m") pod "4336fbe0-9dc7-446d-b2c3-d09e19df9209" (UID: "4336fbe0-9dc7-446d-b2c3-d09e19df9209"). InnerVolumeSpecName "kube-api-access-cxj8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.023887 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxj8m\" (UniqueName: \"kubernetes.io/projected/4336fbe0-9dc7-446d-b2c3-d09e19df9209-kube-api-access-cxj8m\") on node \"crc\" DevicePath \"\"" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.371592 4879 generic.go:334] "Generic (PLEG): container finished" podID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerID="cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502" exitCode=0 Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.371632 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerDied","Data":"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502"} Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.371970 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4cs5b" event={"ID":"4336fbe0-9dc7-446d-b2c3-d09e19df9209","Type":"ContainerDied","Data":"b789a42540f2fb0d3d465d7e192d142eeb40ddeab512057a14f01bde6df07f5d"} Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.372010 4879 scope.go:117] "RemoveContainer" containerID="cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.371675 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4cs5b" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.394362 4879 scope.go:117] "RemoveContainer" containerID="bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.416355 4879 scope.go:117] "RemoveContainer" containerID="b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.470679 4879 scope.go:117] "RemoveContainer" containerID="cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502" Nov 25 16:29:52 crc kubenswrapper[4879]: E1125 16:29:52.471239 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502\": container with ID starting with cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502 not found: ID does not exist" containerID="cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.471282 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502"} err="failed to get container status \"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502\": rpc error: code = NotFound desc = could not find container \"cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502\": container with ID starting with cb05d634408e3b8dd5728a1a586b3018a45d3a1e61c6200aa9c0016c821de502 not found: ID does not exist" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.471311 4879 scope.go:117] "RemoveContainer" containerID="bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403" Nov 25 16:29:52 crc kubenswrapper[4879]: E1125 16:29:52.471584 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403\": container with ID starting with bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403 not found: ID does not exist" containerID="bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.471631 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403"} err="failed to get container status \"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403\": rpc error: code = NotFound desc = could not find container \"bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403\": container with ID starting with bc2abd4567130f5c65e48f0c35a2ebb625dfc01ac3f999583f071d8038c26403 not found: ID does not exist" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.471686 4879 scope.go:117] "RemoveContainer" containerID="b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180" Nov 25 16:29:52 crc kubenswrapper[4879]: E1125 16:29:52.471952 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180\": container with ID starting with b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180 not found: ID does not exist" containerID="b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180" 
Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.471982 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180"} err="failed to get container status \"b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180\": rpc error: code = NotFound desc = could not find container \"b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180\": container with ID starting with b4f185af9c68da80ab918a8e7118f8e8d9a34c5ec220d8a41b62db41e61d4180 not found: ID does not exist" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.636553 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4336fbe0-9dc7-446d-b2c3-d09e19df9209" (UID: "4336fbe0-9dc7-446d-b2c3-d09e19df9209"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.636866 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") pod \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\" (UID: \"4336fbe0-9dc7-446d-b2c3-d09e19df9209\") " Nov 25 16:29:52 crc kubenswrapper[4879]: W1125 16:29:52.637021 4879 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/4336fbe0-9dc7-446d-b2c3-d09e19df9209/volumes/kubernetes.io~empty-dir/catalog-content Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.637751 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4336fbe0-9dc7-446d-b2c3-d09e19df9209" (UID: "4336fbe0-9dc7-446d-b2c3-d09e19df9209"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.715196 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.736068 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4cs5b"] Nov 25 16:29:52 crc kubenswrapper[4879]: I1125 16:29:52.740353 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4336fbe0-9dc7-446d-b2c3-d09e19df9209-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:29:53 crc kubenswrapper[4879]: I1125 16:29:53.660239 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" path="/var/lib/kubelet/pods/4336fbe0-9dc7-446d-b2c3-d09e19df9209/volumes" Nov 25 16:29:59 crc kubenswrapper[4879]: I1125 16:29:59.460495 4879 generic.go:334] "Generic (PLEG): container finished" podID="524593c5-f8a9-41e4-99b0-2ef4f69b37b3" containerID="b3dd0603e76d25e06cbb5f1c3f81b63d69295e68f52d35813950c3747848650a" exitCode=0 Nov 25 16:29:59 crc kubenswrapper[4879]: I1125 16:29:59.460567 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" event={"ID":"524593c5-f8a9-41e4-99b0-2ef4f69b37b3","Type":"ContainerDied","Data":"b3dd0603e76d25e06cbb5f1c3f81b63d69295e68f52d35813950c3747848650a"} Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.151512 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr"] Nov 25 16:30:00 crc kubenswrapper[4879]: E1125 16:30:00.152533 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="extract-content" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.152559 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="extract-content" Nov 25 16:30:00 crc kubenswrapper[4879]: E1125 16:30:00.152594 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="registry-server" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.152601 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="registry-server" Nov 25 16:30:00 crc kubenswrapper[4879]: E1125 16:30:00.152614 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="extract-utilities" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.152621 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="extract-utilities" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.152832 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="4336fbe0-9dc7-446d-b2c3-d09e19df9209" containerName="registry-server" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.153769 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.157052 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.157052 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.166005 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr"] Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.325429 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.325674 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.326016 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqx6b\" (UniqueName: \"kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.428419 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.428553 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqx6b\" (UniqueName: \"kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.428652 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.429759 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume\") pod 
\"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.437103 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.448653 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqx6b\" (UniqueName: \"kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b\") pod \"collect-profiles-29401470-nt4wr\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:00 crc kubenswrapper[4879]: I1125 16:30:00.476028 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.010005 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr"] Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.054544 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.147047 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle\") pod \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.147561 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdkt8\" (UniqueName: \"kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8\") pod \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.147667 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph\") pod \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.147690 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory\") pod \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.147779 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key\") pod \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\" (UID: \"524593c5-f8a9-41e4-99b0-2ef4f69b37b3\") " Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.154587 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "524593c5-f8a9-41e4-99b0-2ef4f69b37b3" (UID: "524593c5-f8a9-41e4-99b0-2ef4f69b37b3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.155802 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph" (OuterVolumeSpecName: "ceph") pod "524593c5-f8a9-41e4-99b0-2ef4f69b37b3" (UID: "524593c5-f8a9-41e4-99b0-2ef4f69b37b3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.155884 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8" (OuterVolumeSpecName: "kube-api-access-wdkt8") pod "524593c5-f8a9-41e4-99b0-2ef4f69b37b3" (UID: "524593c5-f8a9-41e4-99b0-2ef4f69b37b3"). InnerVolumeSpecName "kube-api-access-wdkt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.185530 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory" (OuterVolumeSpecName: "inventory") pod "524593c5-f8a9-41e4-99b0-2ef4f69b37b3" (UID: "524593c5-f8a9-41e4-99b0-2ef4f69b37b3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.185837 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "524593c5-f8a9-41e4-99b0-2ef4f69b37b3" (UID: "524593c5-f8a9-41e4-99b0-2ef4f69b37b3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.250061 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.250242 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.250259 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.250272 4879 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.250287 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdkt8\" (UniqueName: \"kubernetes.io/projected/524593c5-f8a9-41e4-99b0-2ef4f69b37b3-kube-api-access-wdkt8\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.498561 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" event={"ID":"44056d5d-48ba-4c54-8469-716b4ded7a02","Type":"ContainerStarted","Data":"0ba4715e88851c7ae24365695d76ca82ace1dca9a17296e640ef2f37366bab34"} Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.498645 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" event={"ID":"44056d5d-48ba-4c54-8469-716b4ded7a02","Type":"ContainerStarted","Data":"f77d59ba1fc9617040e96f41172e070751e3c15626b31291a4ec623a0cb181f6"} Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.505580 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" event={"ID":"524593c5-f8a9-41e4-99b0-2ef4f69b37b3","Type":"ContainerDied","Data":"46529aaa1e4bd0b06550d212c6ddecde796938722555b3045f6df6f2a1156140"} Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.505626 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46529aaa1e4bd0b06550d212c6ddecde796938722555b3045f6df6f2a1156140" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.505695 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-fcw6q" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.539807 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" podStartSLOduration=1.5397813280000001 podStartE2EDuration="1.539781328s" podCreationTimestamp="2025-11-25 16:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:30:01.520832589 +0000 UTC m=+7493.124245680" watchObservedRunningTime="2025-11-25 16:30:01.539781328 +0000 UTC m=+7493.143194399" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.580566 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-b8l44"] Nov 25 16:30:01 crc kubenswrapper[4879]: E1125 16:30:01.582486 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524593c5-f8a9-41e4-99b0-2ef4f69b37b3" containerName="bootstrap-openstack-openstack-cell1" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.582512 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="524593c5-f8a9-41e4-99b0-2ef4f69b37b3" containerName="bootstrap-openstack-openstack-cell1" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.582966 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="524593c5-f8a9-41e4-99b0-2ef4f69b37b3" containerName="bootstrap-openstack-openstack-cell1" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.584264 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.586508 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.586655 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.586853 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.589016 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.598049 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-b8l44"] Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.762299 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.763035 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.763149 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.763319 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cm6f\" (UniqueName: \"kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.867699 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.867787 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.867856 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cm6f\" (UniqueName: \"kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.867906 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.872687 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.875079 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.875670 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: 
\"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.885875 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cm6f\" (UniqueName: \"kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f\") pod \"download-cache-openstack-openstack-cell1-b8l44\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:01 crc kubenswrapper[4879]: I1125 16:30:01.907882 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:30:02 crc kubenswrapper[4879]: I1125 16:30:02.463993 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-b8l44"] Nov 25 16:30:02 crc kubenswrapper[4879]: I1125 16:30:02.518008 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" event={"ID":"455bbc30-41d7-431c-b0ff-dcda077069cc","Type":"ContainerStarted","Data":"cb69f92f45f60611ddbc50ff5a52ca42ff1a5c1ac46ea7978396afb74e47a5da"} Nov 25 16:30:02 crc kubenswrapper[4879]: I1125 16:30:02.520324 4879 generic.go:334] "Generic (PLEG): container finished" podID="44056d5d-48ba-4c54-8469-716b4ded7a02" containerID="0ba4715e88851c7ae24365695d76ca82ace1dca9a17296e640ef2f37366bab34" exitCode=0 Nov 25 16:30:02 crc kubenswrapper[4879]: I1125 16:30:02.520363 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" event={"ID":"44056d5d-48ba-4c54-8469-716b4ded7a02","Type":"ContainerDied","Data":"0ba4715e88851c7ae24365695d76ca82ace1dca9a17296e640ef2f37366bab34"} Nov 25 16:30:03 crc kubenswrapper[4879]: I1125 16:30:03.533231 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" event={"ID":"455bbc30-41d7-431c-b0ff-dcda077069cc","Type":"ContainerStarted","Data":"dda36d9b9b0c5012164b5c2669b713b9d92f7cbbcf6e244be09ab3d6c47453ef"} Nov 25 16:30:03 crc kubenswrapper[4879]: I1125 16:30:03.564692 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" podStartSLOduration=1.993619619 podStartE2EDuration="2.564674856s" podCreationTimestamp="2025-11-25 16:30:01 +0000 UTC" firstStartedPulling="2025-11-25 16:30:02.481258167 +0000 UTC m=+7494.084671238" lastFinishedPulling="2025-11-25 16:30:03.052313404 +0000 UTC m=+7494.655726475" observedRunningTime="2025-11-25 16:30:03.551602816 +0000 UTC m=+7495.155015887" watchObservedRunningTime="2025-11-25 16:30:03.564674856 +0000 UTC m=+7495.168087927" Nov 25 16:30:03 crc kubenswrapper[4879]: I1125 16:30:03.969157 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.127210 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume\") pod \"44056d5d-48ba-4c54-8469-716b4ded7a02\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.127615 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqx6b\" (UniqueName: \"kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b\") pod \"44056d5d-48ba-4c54-8469-716b4ded7a02\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.127695 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume\") pod \"44056d5d-48ba-4c54-8469-716b4ded7a02\" (UID: \"44056d5d-48ba-4c54-8469-716b4ded7a02\") " Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.128420 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume" (OuterVolumeSpecName: "config-volume") pod "44056d5d-48ba-4c54-8469-716b4ded7a02" (UID: "44056d5d-48ba-4c54-8469-716b4ded7a02"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.135314 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b" (OuterVolumeSpecName: "kube-api-access-gqx6b") pod "44056d5d-48ba-4c54-8469-716b4ded7a02" (UID: "44056d5d-48ba-4c54-8469-716b4ded7a02"). InnerVolumeSpecName "kube-api-access-gqx6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.136055 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "44056d5d-48ba-4c54-8469-716b4ded7a02" (UID: "44056d5d-48ba-4c54-8469-716b4ded7a02"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.230581 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/44056d5d-48ba-4c54-8469-716b4ded7a02-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.230655 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/44056d5d-48ba-4c54-8469-716b4ded7a02-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.230670 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqx6b\" (UniqueName: \"kubernetes.io/projected/44056d5d-48ba-4c54-8469-716b4ded7a02-kube-api-access-gqx6b\") on node \"crc\" DevicePath \"\"" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.546274 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" event={"ID":"44056d5d-48ba-4c54-8469-716b4ded7a02","Type":"ContainerDied","Data":"f77d59ba1fc9617040e96f41172e070751e3c15626b31291a4ec623a0cb181f6"} Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.546341 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f77d59ba1fc9617040e96f41172e070751e3c15626b31291a4ec623a0cb181f6" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.546390 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr" Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.612909 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz"] Nov 25 16:30:04 crc kubenswrapper[4879]: I1125 16:30:04.623532 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401425-gj9sz"] Nov 25 16:30:05 crc kubenswrapper[4879]: I1125 16:30:05.663393 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a05c7385-b05c-4da3-b47d-25dfd77376b2" path="/var/lib/kubelet/pods/a05c7385-b05c-4da3-b47d-25dfd77376b2/volumes" Nov 25 16:30:57 crc kubenswrapper[4879]: I1125 16:30:57.308724 4879 scope.go:117] "RemoveContainer" containerID="13516a8fdb7ee64c776bf318974a6569e7f5c14a7aa72268e6adaa6c87f4140d" Nov 25 16:31:38 crc kubenswrapper[4879]: I1125 16:31:38.462980 4879 generic.go:334] "Generic (PLEG): container finished" podID="455bbc30-41d7-431c-b0ff-dcda077069cc" containerID="dda36d9b9b0c5012164b5c2669b713b9d92f7cbbcf6e244be09ab3d6c47453ef" exitCode=0 Nov 25 16:31:38 crc kubenswrapper[4879]: I1125 16:31:38.463074 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" event={"ID":"455bbc30-41d7-431c-b0ff-dcda077069cc","Type":"ContainerDied","Data":"dda36d9b9b0c5012164b5c2669b713b9d92f7cbbcf6e244be09ab3d6c47453ef"} Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.000164 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.045423 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cm6f\" (UniqueName: \"kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f\") pod \"455bbc30-41d7-431c-b0ff-dcda077069cc\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.045495 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph\") pod \"455bbc30-41d7-431c-b0ff-dcda077069cc\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.045683 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory\") pod \"455bbc30-41d7-431c-b0ff-dcda077069cc\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.045814 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key\") pod \"455bbc30-41d7-431c-b0ff-dcda077069cc\" (UID: \"455bbc30-41d7-431c-b0ff-dcda077069cc\") " Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.056068 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph" (OuterVolumeSpecName: "ceph") pod "455bbc30-41d7-431c-b0ff-dcda077069cc" (UID: "455bbc30-41d7-431c-b0ff-dcda077069cc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.056234 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f" (OuterVolumeSpecName: "kube-api-access-4cm6f") pod "455bbc30-41d7-431c-b0ff-dcda077069cc" (UID: "455bbc30-41d7-431c-b0ff-dcda077069cc"). InnerVolumeSpecName "kube-api-access-4cm6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.081846 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory" (OuterVolumeSpecName: "inventory") pod "455bbc30-41d7-431c-b0ff-dcda077069cc" (UID: "455bbc30-41d7-431c-b0ff-dcda077069cc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.082050 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "455bbc30-41d7-431c-b0ff-dcda077069cc" (UID: "455bbc30-41d7-431c-b0ff-dcda077069cc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.147643 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cm6f\" (UniqueName: \"kubernetes.io/projected/455bbc30-41d7-431c-b0ff-dcda077069cc-kube-api-access-4cm6f\") on node \"crc\" DevicePath \"\"" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.147699 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.147710 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.147720 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/455bbc30-41d7-431c-b0ff-dcda077069cc-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.489145 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" event={"ID":"455bbc30-41d7-431c-b0ff-dcda077069cc","Type":"ContainerDied","Data":"cb69f92f45f60611ddbc50ff5a52ca42ff1a5c1ac46ea7978396afb74e47a5da"} Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.489244 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb69f92f45f60611ddbc50ff5a52ca42ff1a5c1ac46ea7978396afb74e47a5da" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.489182 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-b8l44" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.593294 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-fxbrk"] Nov 25 16:31:40 crc kubenswrapper[4879]: E1125 16:31:40.594026 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44056d5d-48ba-4c54-8469-716b4ded7a02" containerName="collect-profiles" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.594048 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="44056d5d-48ba-4c54-8469-716b4ded7a02" containerName="collect-profiles" Nov 25 16:31:40 crc kubenswrapper[4879]: E1125 16:31:40.594099 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455bbc30-41d7-431c-b0ff-dcda077069cc" containerName="download-cache-openstack-openstack-cell1" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.594108 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="455bbc30-41d7-431c-b0ff-dcda077069cc" containerName="download-cache-openstack-openstack-cell1" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.594450 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="455bbc30-41d7-431c-b0ff-dcda077069cc" containerName="download-cache-openstack-openstack-cell1" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.594520 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="44056d5d-48ba-4c54-8469-716b4ded7a02" containerName="collect-profiles" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.595786 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.599215 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.599468 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.599694 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.599807 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.608242 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-fxbrk"] Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.660302 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.660644 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.660808 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4mxv\" (UniqueName: \"kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.661141 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.763993 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.764389 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" 
Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.764460 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4mxv\" (UniqueName: \"kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.764568 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.768653 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.768886 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.768914 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.780943 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4mxv\" (UniqueName: \"kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv\") pod \"configure-network-openstack-openstack-cell1-fxbrk\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:40 crc kubenswrapper[4879]: I1125 16:31:40.923061 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:31:41 crc kubenswrapper[4879]: I1125 16:31:41.476835 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-fxbrk"] Nov 25 16:31:41 crc kubenswrapper[4879]: I1125 16:31:41.483191 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:31:41 crc kubenswrapper[4879]: I1125 16:31:41.499852 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" event={"ID":"89862778-c037-4fb8-8424-dfc99af4f6a4","Type":"ContainerStarted","Data":"881ffb11a33285ed58b839be5b34a4bedcdd0f9db6934e2d693accadeca7acfd"} Nov 25 16:31:43 crc kubenswrapper[4879]: I1125 16:31:43.524419 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" event={"ID":"89862778-c037-4fb8-8424-dfc99af4f6a4","Type":"ContainerStarted","Data":"4de933699918129bab85ace066556ff7d8ac68d6c4f9945fe14a2d83b7a3efa7"} Nov 25 16:31:43 crc kubenswrapper[4879]: I1125 16:31:43.540080 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" podStartSLOduration=2.533739982 podStartE2EDuration="3.540061165s" podCreationTimestamp="2025-11-25 16:31:40 +0000 UTC" firstStartedPulling="2025-11-25 16:31:41.482866111 +0000 UTC m=+7593.086279182" lastFinishedPulling="2025-11-25 16:31:42.489187294 +0000 UTC m=+7594.092600365" observedRunningTime="2025-11-25 16:31:43.539090938 +0000 UTC m=+7595.142504009" watchObservedRunningTime="2025-11-25 16:31:43.540061165 +0000 UTC m=+7595.143474236" Nov 25 16:31:47 crc kubenswrapper[4879]: I1125 16:31:47.408669 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:31:47 crc kubenswrapper[4879]: I1125 16:31:47.409226 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:32:17 crc kubenswrapper[4879]: I1125 16:32:17.409008 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:32:17 crc kubenswrapper[4879]: I1125 16:32:17.409595 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:32:47 crc kubenswrapper[4879]: I1125 16:32:47.408965 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:32:47 crc kubenswrapper[4879]: I1125 16:32:47.409691 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:32:47 crc kubenswrapper[4879]: I1125 16:32:47.409743 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:32:47 crc kubenswrapper[4879]: I1125 16:32:47.410564 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:32:47 crc kubenswrapper[4879]: I1125 16:32:47.410622 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7" gracePeriod=600 Nov 25 16:32:48 crc kubenswrapper[4879]: I1125 16:32:48.118313 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7" exitCode=0 Nov 25 16:32:48 crc kubenswrapper[4879]: I1125 16:32:48.118429 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7"} Nov 25 16:32:48 crc kubenswrapper[4879]: I1125 16:32:48.118785 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf"} Nov 25 16:32:48 crc kubenswrapper[4879]: I1125 16:32:48.118816 4879 scope.go:117] "RemoveContainer" containerID="33c398117853d430d65b39adc802b4f62d27a8b1504794ea7bb2e44c8cb4fc83" Nov 25 16:33:05 crc kubenswrapper[4879]: I1125 16:33:05.290065 4879 generic.go:334] "Generic (PLEG): container finished" podID="89862778-c037-4fb8-8424-dfc99af4f6a4" containerID="4de933699918129bab85ace066556ff7d8ac68d6c4f9945fe14a2d83b7a3efa7" exitCode=0 Nov 25 16:33:05 crc kubenswrapper[4879]: I1125 16:33:05.290140 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" event={"ID":"89862778-c037-4fb8-8424-dfc99af4f6a4","Type":"ContainerDied","Data":"4de933699918129bab85ace066556ff7d8ac68d6c4f9945fe14a2d83b7a3efa7"} Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.740089 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.850909 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph\") pod \"89862778-c037-4fb8-8424-dfc99af4f6a4\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.851015 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4mxv\" (UniqueName: \"kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv\") pod \"89862778-c037-4fb8-8424-dfc99af4f6a4\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.851272 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key\") pod \"89862778-c037-4fb8-8424-dfc99af4f6a4\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.851337 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory\") pod \"89862778-c037-4fb8-8424-dfc99af4f6a4\" (UID: \"89862778-c037-4fb8-8424-dfc99af4f6a4\") " Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.860256 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph" (OuterVolumeSpecName: "ceph") pod "89862778-c037-4fb8-8424-dfc99af4f6a4" (UID: "89862778-c037-4fb8-8424-dfc99af4f6a4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.872695 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv" (OuterVolumeSpecName: "kube-api-access-g4mxv") pod "89862778-c037-4fb8-8424-dfc99af4f6a4" (UID: "89862778-c037-4fb8-8424-dfc99af4f6a4"). InnerVolumeSpecName "kube-api-access-g4mxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.885711 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory" (OuterVolumeSpecName: "inventory") pod "89862778-c037-4fb8-8424-dfc99af4f6a4" (UID: "89862778-c037-4fb8-8424-dfc99af4f6a4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.887318 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "89862778-c037-4fb8-8424-dfc99af4f6a4" (UID: "89862778-c037-4fb8-8424-dfc99af4f6a4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.953914 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4mxv\" (UniqueName: \"kubernetes.io/projected/89862778-c037-4fb8-8424-dfc99af4f6a4-kube-api-access-g4mxv\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.954202 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.954217 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:06 crc kubenswrapper[4879]: I1125 16:33:06.954229 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/89862778-c037-4fb8-8424-dfc99af4f6a4-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.314032 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" event={"ID":"89862778-c037-4fb8-8424-dfc99af4f6a4","Type":"ContainerDied","Data":"881ffb11a33285ed58b839be5b34a4bedcdd0f9db6934e2d693accadeca7acfd"} Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.314343 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="881ffb11a33285ed58b839be5b34a4bedcdd0f9db6934e2d693accadeca7acfd" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.314104 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-fxbrk" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.424508 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-6b78g"] Nov 25 16:33:07 crc kubenswrapper[4879]: E1125 16:33:07.425432 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89862778-c037-4fb8-8424-dfc99af4f6a4" containerName="configure-network-openstack-openstack-cell1" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.425480 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="89862778-c037-4fb8-8424-dfc99af4f6a4" containerName="configure-network-openstack-openstack-cell1" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.425880 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="89862778-c037-4fb8-8424-dfc99af4f6a4" containerName="configure-network-openstack-openstack-cell1" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.427151 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.435891 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.436198 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.436264 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.436423 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.447975 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-6b78g"] Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.565907 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.566552 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n27wn\" (UniqueName: \"kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.566823 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.567064 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.668619 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.668682 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 
25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.668769 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n27wn\" (UniqueName: \"kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.668811 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.684520 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.684554 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.687518 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.687774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n27wn\" (UniqueName: \"kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn\") pod \"validate-network-openstack-openstack-cell1-6b78g\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:07 crc kubenswrapper[4879]: I1125 16:33:07.776246 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:08 crc kubenswrapper[4879]: I1125 16:33:08.298402 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-6b78g"] Nov 25 16:33:08 crc kubenswrapper[4879]: I1125 16:33:08.325157 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" event={"ID":"7dee0d4d-6402-46fe-95d5-dc69efb5feb3","Type":"ContainerStarted","Data":"591b602687972de682445809e9b893998e2af18c9095d7ef17a70b4a2a691773"} Nov 25 16:33:09 crc kubenswrapper[4879]: I1125 16:33:09.346693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" event={"ID":"7dee0d4d-6402-46fe-95d5-dc69efb5feb3","Type":"ContainerStarted","Data":"076765a2e925e2b16d8b534c1a3aa26feba2483195ac30192110b50c56165b51"} Nov 25 16:33:09 crc kubenswrapper[4879]: I1125 16:33:09.371510 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" podStartSLOduration=1.892897632 podStartE2EDuration="2.371487192s" podCreationTimestamp="2025-11-25 16:33:07 +0000 UTC" firstStartedPulling="2025-11-25 16:33:08.298746953 +0000 UTC m=+7679.902160024" lastFinishedPulling="2025-11-25 16:33:08.777336513 +0000 UTC m=+7680.380749584" observedRunningTime="2025-11-25 16:33:09.367457882 +0000 UTC m=+7680.970870963" watchObservedRunningTime="2025-11-25 16:33:09.371487192 +0000 UTC m=+7680.974900273" Nov 25 16:33:14 crc kubenswrapper[4879]: I1125 16:33:14.396372 4879 generic.go:334] "Generic (PLEG): container finished" podID="7dee0d4d-6402-46fe-95d5-dc69efb5feb3" containerID="076765a2e925e2b16d8b534c1a3aa26feba2483195ac30192110b50c56165b51" exitCode=0 Nov 25 16:33:14 crc kubenswrapper[4879]: I1125 16:33:14.396462 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" event={"ID":"7dee0d4d-6402-46fe-95d5-dc69efb5feb3","Type":"ContainerDied","Data":"076765a2e925e2b16d8b534c1a3aa26feba2483195ac30192110b50c56165b51"} Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.845988 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.936431 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n27wn\" (UniqueName: \"kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn\") pod \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.936580 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory\") pod \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.936647 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key\") pod \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.936786 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph\") pod \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\" (UID: \"7dee0d4d-6402-46fe-95d5-dc69efb5feb3\") " Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.943358 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph" (OuterVolumeSpecName: "ceph") pod "7dee0d4d-6402-46fe-95d5-dc69efb5feb3" (UID: "7dee0d4d-6402-46fe-95d5-dc69efb5feb3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.943404 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn" (OuterVolumeSpecName: "kube-api-access-n27wn") pod "7dee0d4d-6402-46fe-95d5-dc69efb5feb3" (UID: "7dee0d4d-6402-46fe-95d5-dc69efb5feb3"). InnerVolumeSpecName "kube-api-access-n27wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.971883 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory" (OuterVolumeSpecName: "inventory") pod "7dee0d4d-6402-46fe-95d5-dc69efb5feb3" (UID: "7dee0d4d-6402-46fe-95d5-dc69efb5feb3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:15 crc kubenswrapper[4879]: I1125 16:33:15.974112 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7dee0d4d-6402-46fe-95d5-dc69efb5feb3" (UID: "7dee0d4d-6402-46fe-95d5-dc69efb5feb3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.039311 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n27wn\" (UniqueName: \"kubernetes.io/projected/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-kube-api-access-n27wn\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.039827 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.039929 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.040017 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7dee0d4d-6402-46fe-95d5-dc69efb5feb3-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.415503 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" event={"ID":"7dee0d4d-6402-46fe-95d5-dc69efb5feb3","Type":"ContainerDied","Data":"591b602687972de682445809e9b893998e2af18c9095d7ef17a70b4a2a691773"} Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.415567 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="591b602687972de682445809e9b893998e2af18c9095d7ef17a70b4a2a691773" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.415645 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-6b78g" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.489750 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-xxrt8"] Nov 25 16:33:16 crc kubenswrapper[4879]: E1125 16:33:16.490496 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dee0d4d-6402-46fe-95d5-dc69efb5feb3" containerName="validate-network-openstack-openstack-cell1" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.490523 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dee0d4d-6402-46fe-95d5-dc69efb5feb3" containerName="validate-network-openstack-openstack-cell1" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.490891 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dee0d4d-6402-46fe-95d5-dc69efb5feb3" containerName="validate-network-openstack-openstack-cell1" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.491779 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.494648 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.494673 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.494836 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.495375 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.502422 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-xxrt8"] Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.550345 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.550436 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.550466 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.550852 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr5tw\" (UniqueName: \"kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.652080 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr5tw\" (UniqueName: \"kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.652208 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 
16:33:16.652242 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.652262 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.656750 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.657110 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.658922 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.669307 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr5tw\" (UniqueName: \"kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw\") pod \"install-os-openstack-openstack-cell1-xxrt8\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:16 crc kubenswrapper[4879]: I1125 16:33:16.822966 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:33:17 crc kubenswrapper[4879]: I1125 16:33:17.327161 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-xxrt8"] Nov 25 16:33:17 crc kubenswrapper[4879]: I1125 16:33:17.425914 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" event={"ID":"fde903cf-f64d-49b5-a9a6-fd0a054d979c","Type":"ContainerStarted","Data":"15cc1eff83848f904639e9336d1bde932053ea380774cf8dc85bed2513b119f3"} Nov 25 16:33:18 crc kubenswrapper[4879]: I1125 16:33:18.437605 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" event={"ID":"fde903cf-f64d-49b5-a9a6-fd0a054d979c","Type":"ContainerStarted","Data":"2134d4bc698064a63517a7538d8caa650e46193d99c35223eca01ebccb8d4d32"} Nov 25 16:33:18 crc kubenswrapper[4879]: I1125 16:33:18.459844 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" podStartSLOduration=2.069011279 podStartE2EDuration="2.45982694s" podCreationTimestamp="2025-11-25 16:33:16 +0000 UTC" firstStartedPulling="2025-11-25 16:33:17.331517623 +0000 UTC m=+7688.934930694" lastFinishedPulling="2025-11-25 16:33:17.722333284 +0000 UTC m=+7689.325746355" observedRunningTime="2025-11-25 16:33:18.452494989 +0000 UTC m=+7690.055908060" watchObservedRunningTime="2025-11-25 16:33:18.45982694 +0000 UTC m=+7690.063240011" Nov 25 16:34:03 crc kubenswrapper[4879]: I1125 16:34:03.890275 4879 generic.go:334] "Generic (PLEG): container finished" podID="fde903cf-f64d-49b5-a9a6-fd0a054d979c" containerID="2134d4bc698064a63517a7538d8caa650e46193d99c35223eca01ebccb8d4d32" exitCode=0 Nov 25 16:34:03 crc kubenswrapper[4879]: I1125 16:34:03.890364 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" event={"ID":"fde903cf-f64d-49b5-a9a6-fd0a054d979c","Type":"ContainerDied","Data":"2134d4bc698064a63517a7538d8caa650e46193d99c35223eca01ebccb8d4d32"} Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.397626 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.513195 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr5tw\" (UniqueName: \"kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw\") pod \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.513274 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key\") pod \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.513333 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory\") pod \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.513496 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph\") pod \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\" (UID: \"fde903cf-f64d-49b5-a9a6-fd0a054d979c\") " Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.521350 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph" (OuterVolumeSpecName: "ceph") pod "fde903cf-f64d-49b5-a9a6-fd0a054d979c" (UID: "fde903cf-f64d-49b5-a9a6-fd0a054d979c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.521889 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw" (OuterVolumeSpecName: "kube-api-access-fr5tw") pod "fde903cf-f64d-49b5-a9a6-fd0a054d979c" (UID: "fde903cf-f64d-49b5-a9a6-fd0a054d979c"). InnerVolumeSpecName "kube-api-access-fr5tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.555759 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fde903cf-f64d-49b5-a9a6-fd0a054d979c" (UID: "fde903cf-f64d-49b5-a9a6-fd0a054d979c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.560607 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory" (OuterVolumeSpecName: "inventory") pod "fde903cf-f64d-49b5-a9a6-fd0a054d979c" (UID: "fde903cf-f64d-49b5-a9a6-fd0a054d979c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.616384 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr5tw\" (UniqueName: \"kubernetes.io/projected/fde903cf-f64d-49b5-a9a6-fd0a054d979c-kube-api-access-fr5tw\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.616428 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.616439 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.616448 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fde903cf-f64d-49b5-a9a6-fd0a054d979c-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.911020 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" event={"ID":"fde903cf-f64d-49b5-a9a6-fd0a054d979c","Type":"ContainerDied","Data":"15cc1eff83848f904639e9336d1bde932053ea380774cf8dc85bed2513b119f3"} Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.911086 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15cc1eff83848f904639e9336d1bde932053ea380774cf8dc85bed2513b119f3" Nov 25 16:34:05 crc kubenswrapper[4879]: I1125 16:34:05.911173 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-xxrt8" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.006939 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-v42kw"] Nov 25 16:34:06 crc kubenswrapper[4879]: E1125 16:34:06.007990 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde903cf-f64d-49b5-a9a6-fd0a054d979c" containerName="install-os-openstack-openstack-cell1" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.008011 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde903cf-f64d-49b5-a9a6-fd0a054d979c" containerName="install-os-openstack-openstack-cell1" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.008323 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde903cf-f64d-49b5-a9a6-fd0a054d979c" containerName="install-os-openstack-openstack-cell1" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.009318 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.011607 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.011944 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.012886 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.012962 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.024501 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-v42kw"] Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.128382 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hng2x\" (UniqueName: \"kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.128462 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.128643 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.128843 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.231415 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.231551 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 
16:34:06.231621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.231847 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hng2x\" (UniqueName: \"kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.235789 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.236115 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.237351 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.252784 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hng2x\" (UniqueName: \"kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x\") pod \"configure-os-openstack-openstack-cell1-v42kw\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.329727 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.861839 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-v42kw"] Nov 25 16:34:06 crc kubenswrapper[4879]: I1125 16:34:06.922369 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" event={"ID":"e6f04bfd-aca5-4e3b-b77e-301b43048202","Type":"ContainerStarted","Data":"cfc136e3a23028b6b6034e5cfb0a2b08ca345906f322351dcff299f5ad768b6b"} Nov 25 16:34:07 crc kubenswrapper[4879]: I1125 16:34:07.938399 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" event={"ID":"e6f04bfd-aca5-4e3b-b77e-301b43048202","Type":"ContainerStarted","Data":"73972373f74f62b5b37fcdf66680b5ed7159b2cfe34073ea852514096a499091"} Nov 25 16:34:07 crc kubenswrapper[4879]: I1125 16:34:07.971916 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" podStartSLOduration=2.551809387 podStartE2EDuration="2.971892068s" podCreationTimestamp="2025-11-25 16:34:05 +0000 UTC" firstStartedPulling="2025-11-25 16:34:06.866414954 +0000 UTC m=+7738.469828025" lastFinishedPulling="2025-11-25 16:34:07.286497635 +0000 UTC m=+7738.889910706" observedRunningTime="2025-11-25 16:34:07.966300745 +0000 UTC m=+7739.569713836" watchObservedRunningTime="2025-11-25 16:34:07.971892068 +0000 UTC m=+7739.575305139" Nov 25 16:34:47 crc kubenswrapper[4879]: I1125 16:34:47.408388 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:34:47 crc kubenswrapper[4879]: I1125 16:34:47.408985 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:34:51 crc kubenswrapper[4879]: I1125 16:34:51.537610 4879 generic.go:334] "Generic (PLEG): container finished" podID="e6f04bfd-aca5-4e3b-b77e-301b43048202" containerID="73972373f74f62b5b37fcdf66680b5ed7159b2cfe34073ea852514096a499091" exitCode=0 Nov 25 16:34:51 crc kubenswrapper[4879]: I1125 16:34:51.537700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" event={"ID":"e6f04bfd-aca5-4e3b-b77e-301b43048202","Type":"ContainerDied","Data":"73972373f74f62b5b37fcdf66680b5ed7159b2cfe34073ea852514096a499091"} Nov 25 16:34:52 crc kubenswrapper[4879]: I1125 16:34:52.981205 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.005664 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key\") pod \"e6f04bfd-aca5-4e3b-b77e-301b43048202\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.006142 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory\") pod \"e6f04bfd-aca5-4e3b-b77e-301b43048202\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.006176 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph\") pod \"e6f04bfd-aca5-4e3b-b77e-301b43048202\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.006259 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hng2x\" (UniqueName: \"kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x\") pod \"e6f04bfd-aca5-4e3b-b77e-301b43048202\" (UID: \"e6f04bfd-aca5-4e3b-b77e-301b43048202\") " Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.014507 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x" (OuterVolumeSpecName: "kube-api-access-hng2x") pod "e6f04bfd-aca5-4e3b-b77e-301b43048202" (UID: "e6f04bfd-aca5-4e3b-b77e-301b43048202"). InnerVolumeSpecName "kube-api-access-hng2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.014834 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph" (OuterVolumeSpecName: "ceph") pod "e6f04bfd-aca5-4e3b-b77e-301b43048202" (UID: "e6f04bfd-aca5-4e3b-b77e-301b43048202"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.039655 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e6f04bfd-aca5-4e3b-b77e-301b43048202" (UID: "e6f04bfd-aca5-4e3b-b77e-301b43048202"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.045861 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory" (OuterVolumeSpecName: "inventory") pod "e6f04bfd-aca5-4e3b-b77e-301b43048202" (UID: "e6f04bfd-aca5-4e3b-b77e-301b43048202"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.109227 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.109285 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.109297 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hng2x\" (UniqueName: \"kubernetes.io/projected/e6f04bfd-aca5-4e3b-b77e-301b43048202-kube-api-access-hng2x\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.109307 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e6f04bfd-aca5-4e3b-b77e-301b43048202-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.560938 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" event={"ID":"e6f04bfd-aca5-4e3b-b77e-301b43048202","Type":"ContainerDied","Data":"cfc136e3a23028b6b6034e5cfb0a2b08ca345906f322351dcff299f5ad768b6b"} Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.560979 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfc136e3a23028b6b6034e5cfb0a2b08ca345906f322351dcff299f5ad768b6b" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.561030 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-v42kw" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.681408 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-cnqj8"] Nov 25 16:34:53 crc kubenswrapper[4879]: E1125 16:34:53.681875 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f04bfd-aca5-4e3b-b77e-301b43048202" containerName="configure-os-openstack-openstack-cell1" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.681894 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f04bfd-aca5-4e3b-b77e-301b43048202" containerName="configure-os-openstack-openstack-cell1" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.682361 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f04bfd-aca5-4e3b-b77e-301b43048202" containerName="configure-os-openstack-openstack-cell1" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.684976 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.691687 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-cnqj8"] Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.692363 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.692535 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.693467 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.696445 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.723875 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.724007 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2qvz\" (UniqueName: \"kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.724164 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.724257 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.826584 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.826740 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.826804 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2qvz\" (UniqueName: 
\"kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.826891 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.830820 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.831698 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.841472 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:53 crc kubenswrapper[4879]: I1125 16:34:53.847212 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2qvz\" (UniqueName: \"kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz\") pod \"ssh-known-hosts-openstack-cnqj8\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:54 crc kubenswrapper[4879]: I1125 16:34:54.007463 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:34:54 crc kubenswrapper[4879]: I1125 16:34:54.523331 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-cnqj8"] Nov 25 16:34:54 crc kubenswrapper[4879]: I1125 16:34:54.572029 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-cnqj8" event={"ID":"e4aa46b6-8d47-4b77-b5f9-58a3c7006429","Type":"ContainerStarted","Data":"5bb6349cadc5064bdb5920eb9c45d01d23337d2730a4a7b82732e8517cb62c63"} Nov 25 16:34:55 crc kubenswrapper[4879]: I1125 16:34:55.586194 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-cnqj8" event={"ID":"e4aa46b6-8d47-4b77-b5f9-58a3c7006429","Type":"ContainerStarted","Data":"12835b8fbc5af30313e555198ab5b1a1679a596e9b6e58731d191f1534910217"} Nov 25 16:34:56 crc kubenswrapper[4879]: I1125 16:34:56.623287 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-cnqj8" podStartSLOduration=2.83458672 podStartE2EDuration="3.623266325s" podCreationTimestamp="2025-11-25 16:34:53 +0000 UTC" firstStartedPulling="2025-11-25 16:34:54.530267168 +0000 UTC m=+7786.133680239" lastFinishedPulling="2025-11-25 16:34:55.318946763 +0000 UTC m=+7786.922359844" observedRunningTime="2025-11-25 16:34:56.613813089 +0000 UTC m=+7788.217226160" watchObservedRunningTime="2025-11-25 16:34:56.623266325 +0000 UTC m=+7788.226679386" Nov 25 16:35:04 crc kubenswrapper[4879]: I1125 16:35:04.681316 4879 generic.go:334] "Generic (PLEG): container finished" podID="e4aa46b6-8d47-4b77-b5f9-58a3c7006429" containerID="12835b8fbc5af30313e555198ab5b1a1679a596e9b6e58731d191f1534910217" exitCode=0 Nov 25 16:35:04 crc kubenswrapper[4879]: I1125 16:35:04.681372 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-cnqj8" event={"ID":"e4aa46b6-8d47-4b77-b5f9-58a3c7006429","Type":"ContainerDied","Data":"12835b8fbc5af30313e555198ab5b1a1679a596e9b6e58731d191f1534910217"} Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.130152 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.191254 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0\") pod \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.191418 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2qvz\" (UniqueName: \"kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz\") pod \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.191557 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1\") pod \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.191696 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph\") pod \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\" (UID: \"e4aa46b6-8d47-4b77-b5f9-58a3c7006429\") " Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.197154 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph" (OuterVolumeSpecName: "ceph") pod "e4aa46b6-8d47-4b77-b5f9-58a3c7006429" (UID: "e4aa46b6-8d47-4b77-b5f9-58a3c7006429"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.197370 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz" (OuterVolumeSpecName: "kube-api-access-r2qvz") pod "e4aa46b6-8d47-4b77-b5f9-58a3c7006429" (UID: "e4aa46b6-8d47-4b77-b5f9-58a3c7006429"). InnerVolumeSpecName "kube-api-access-r2qvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.224342 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "e4aa46b6-8d47-4b77-b5f9-58a3c7006429" (UID: "e4aa46b6-8d47-4b77-b5f9-58a3c7006429"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.226403 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "e4aa46b6-8d47-4b77-b5f9-58a3c7006429" (UID: "e4aa46b6-8d47-4b77-b5f9-58a3c7006429"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.293865 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.293900 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.293912 4879 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.293921 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2qvz\" (UniqueName: \"kubernetes.io/projected/e4aa46b6-8d47-4b77-b5f9-58a3c7006429-kube-api-access-r2qvz\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.703549 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-cnqj8" event={"ID":"e4aa46b6-8d47-4b77-b5f9-58a3c7006429","Type":"ContainerDied","Data":"5bb6349cadc5064bdb5920eb9c45d01d23337d2730a4a7b82732e8517cb62c63"} Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.703855 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bb6349cadc5064bdb5920eb9c45d01d23337d2730a4a7b82732e8517cb62c63" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.703650 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-cnqj8" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.780344 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wcnjd"] Nov 25 16:35:06 crc kubenswrapper[4879]: E1125 16:35:06.781021 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4aa46b6-8d47-4b77-b5f9-58a3c7006429" containerName="ssh-known-hosts-openstack" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.781052 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4aa46b6-8d47-4b77-b5f9-58a3c7006429" containerName="ssh-known-hosts-openstack" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.781407 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4aa46b6-8d47-4b77-b5f9-58a3c7006429" containerName="ssh-known-hosts-openstack" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.782653 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.785639 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.786163 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.786438 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.786719 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.799904 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wcnjd"] Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.803700 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.803871 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.803944 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.803996 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plcsk\" (UniqueName: \"kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.906264 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.906339 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.906374 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-plcsk\" (UniqueName: \"kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.906439 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.912428 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.912465 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.912903 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:06 crc kubenswrapper[4879]: I1125 16:35:06.923774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plcsk\" (UniqueName: \"kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk\") pod \"run-os-openstack-openstack-cell1-wcnjd\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:07 crc kubenswrapper[4879]: I1125 16:35:07.103033 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:07 crc kubenswrapper[4879]: I1125 16:35:07.656993 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-wcnjd"] Nov 25 16:35:07 crc kubenswrapper[4879]: I1125 16:35:07.717571 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" event={"ID":"2db6d808-8fb9-4a9c-a582-d56baec0b5e4","Type":"ContainerStarted","Data":"bba8405abe97010cf901b58917b5d3dbade292d590abb250de0a0fc911078708"} Nov 25 16:35:08 crc kubenswrapper[4879]: I1125 16:35:08.728521 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" event={"ID":"2db6d808-8fb9-4a9c-a582-d56baec0b5e4","Type":"ContainerStarted","Data":"3df04c47d4053010aa947ef5215ce952bb2b1b9d4af16c7a680f9459c4cad29f"} Nov 25 16:35:08 crc kubenswrapper[4879]: I1125 16:35:08.751640 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" podStartSLOduration=2.344785346 podStartE2EDuration="2.751622064s" podCreationTimestamp="2025-11-25 16:35:06 +0000 UTC" firstStartedPulling="2025-11-25 16:35:07.662085021 +0000 UTC m=+7799.265498092" lastFinishedPulling="2025-11-25 16:35:08.068921739 +0000 UTC m=+7799.672334810" observedRunningTime="2025-11-25 16:35:08.746897502 +0000 UTC m=+7800.350310583" watchObservedRunningTime="2025-11-25 16:35:08.751622064 +0000 UTC m=+7800.355035135" Nov 25 16:35:16 crc kubenswrapper[4879]: I1125 16:35:16.815333 4879 generic.go:334] "Generic (PLEG): container finished" podID="2db6d808-8fb9-4a9c-a582-d56baec0b5e4" containerID="3df04c47d4053010aa947ef5215ce952bb2b1b9d4af16c7a680f9459c4cad29f" exitCode=0 Nov 25 16:35:16 crc kubenswrapper[4879]: I1125 16:35:16.815463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" event={"ID":"2db6d808-8fb9-4a9c-a582-d56baec0b5e4","Type":"ContainerDied","Data":"3df04c47d4053010aa947ef5215ce952bb2b1b9d4af16c7a680f9459c4cad29f"} Nov 25 16:35:17 crc kubenswrapper[4879]: I1125 16:35:17.409320 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:35:17 crc kubenswrapper[4879]: I1125 16:35:17.409669 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.226751 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.279708 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory\") pod \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.279949 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph\") pod \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.280820 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plcsk\" (UniqueName: \"kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk\") pod \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.280895 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key\") pod \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\" (UID: \"2db6d808-8fb9-4a9c-a582-d56baec0b5e4\") " Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.285725 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph" (OuterVolumeSpecName: "ceph") pod "2db6d808-8fb9-4a9c-a582-d56baec0b5e4" (UID: "2db6d808-8fb9-4a9c-a582-d56baec0b5e4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.285986 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk" (OuterVolumeSpecName: "kube-api-access-plcsk") pod "2db6d808-8fb9-4a9c-a582-d56baec0b5e4" (UID: "2db6d808-8fb9-4a9c-a582-d56baec0b5e4"). InnerVolumeSpecName "kube-api-access-plcsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.309492 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory" (OuterVolumeSpecName: "inventory") pod "2db6d808-8fb9-4a9c-a582-d56baec0b5e4" (UID: "2db6d808-8fb9-4a9c-a582-d56baec0b5e4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.310248 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2db6d808-8fb9-4a9c-a582-d56baec0b5e4" (UID: "2db6d808-8fb9-4a9c-a582-d56baec0b5e4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.384361 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plcsk\" (UniqueName: \"kubernetes.io/projected/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-kube-api-access-plcsk\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.384398 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.384411 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.384424 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db6d808-8fb9-4a9c-a582-d56baec0b5e4-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.835542 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" event={"ID":"2db6d808-8fb9-4a9c-a582-d56baec0b5e4","Type":"ContainerDied","Data":"bba8405abe97010cf901b58917b5d3dbade292d590abb250de0a0fc911078708"} Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.835895 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bba8405abe97010cf901b58917b5d3dbade292d590abb250de0a0fc911078708" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.835563 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-wcnjd" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.905599 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lxvxv"] Nov 25 16:35:18 crc kubenswrapper[4879]: E1125 16:35:18.906089 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db6d808-8fb9-4a9c-a582-d56baec0b5e4" containerName="run-os-openstack-openstack-cell1" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.906105 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db6d808-8fb9-4a9c-a582-d56baec0b5e4" containerName="run-os-openstack-openstack-cell1" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.906383 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db6d808-8fb9-4a9c-a582-d56baec0b5e4" containerName="run-os-openstack-openstack-cell1" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.907344 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.909332 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.910195 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.910394 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.910539 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.919923 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lxvxv"] Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.998697 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.998778 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.998878 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw272\" (UniqueName: \"kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:18 crc kubenswrapper[4879]: I1125 16:35:18.998976 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.101494 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.101814 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw272\" (UniqueName: \"kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.101980 4879 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.102177 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.106424 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.107503 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.107619 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.121960 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw272\" (UniqueName: \"kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272\") pod \"reboot-os-openstack-openstack-cell1-lxvxv\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:19 crc kubenswrapper[4879]: I1125 16:35:19.236428 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:20 crc kubenswrapper[4879]: I1125 16:35:19.766914 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lxvxv"] Nov 25 16:35:20 crc kubenswrapper[4879]: I1125 16:35:19.846344 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" event={"ID":"3ad63e1e-3807-4ca6-8b47-e36e487f88f4","Type":"ContainerStarted","Data":"56fc60ec358f43dc04ce8ea8aa60d0e0a88b2bbfac19d6c2211d3a24b8ea8746"} Nov 25 16:35:20 crc kubenswrapper[4879]: I1125 16:35:20.859539 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" event={"ID":"3ad63e1e-3807-4ca6-8b47-e36e487f88f4","Type":"ContainerStarted","Data":"9537d31daebc6ebfb24b1ca9d189600b124d44ab51c23e4163d2a86181f15c31"} Nov 25 16:35:20 crc kubenswrapper[4879]: I1125 16:35:20.885909 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" podStartSLOduration=2.19304265 podStartE2EDuration="2.88589129s" podCreationTimestamp="2025-11-25 16:35:18 +0000 UTC" firstStartedPulling="2025-11-25 16:35:19.76884565 +0000 UTC m=+7811.372258721" lastFinishedPulling="2025-11-25 16:35:20.46169429 +0000 UTC m=+7812.065107361" observedRunningTime="2025-11-25 16:35:20.87592539 +0000 UTC m=+7812.479338481" watchObservedRunningTime="2025-11-25 16:35:20.88589129 +0000 UTC m=+7812.489304361" Nov 25 16:35:37 crc kubenswrapper[4879]: I1125 16:35:37.013351 4879 generic.go:334] "Generic (PLEG): container finished" podID="3ad63e1e-3807-4ca6-8b47-e36e487f88f4" containerID="9537d31daebc6ebfb24b1ca9d189600b124d44ab51c23e4163d2a86181f15c31" exitCode=0 Nov 25 16:35:37 crc kubenswrapper[4879]: I1125 16:35:37.013455 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" event={"ID":"3ad63e1e-3807-4ca6-8b47-e36e487f88f4","Type":"ContainerDied","Data":"9537d31daebc6ebfb24b1ca9d189600b124d44ab51c23e4163d2a86181f15c31"} Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.453066 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.508702 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph\") pod \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.508781 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key\") pod \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.509336 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw272\" (UniqueName: \"kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272\") pod \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.509430 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory\") pod \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\" (UID: \"3ad63e1e-3807-4ca6-8b47-e36e487f88f4\") " Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.514500 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph" (OuterVolumeSpecName: "ceph") pod "3ad63e1e-3807-4ca6-8b47-e36e487f88f4" (UID: "3ad63e1e-3807-4ca6-8b47-e36e487f88f4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.514780 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272" (OuterVolumeSpecName: "kube-api-access-cw272") pod "3ad63e1e-3807-4ca6-8b47-e36e487f88f4" (UID: "3ad63e1e-3807-4ca6-8b47-e36e487f88f4"). InnerVolumeSpecName "kube-api-access-cw272". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.542109 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3ad63e1e-3807-4ca6-8b47-e36e487f88f4" (UID: "3ad63e1e-3807-4ca6-8b47-e36e487f88f4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.542798 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory" (OuterVolumeSpecName: "inventory") pod "3ad63e1e-3807-4ca6-8b47-e36e487f88f4" (UID: "3ad63e1e-3807-4ca6-8b47-e36e487f88f4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.612017 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw272\" (UniqueName: \"kubernetes.io/projected/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-kube-api-access-cw272\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.612065 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.612079 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:38 crc kubenswrapper[4879]: I1125 16:35:38.612092 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3ad63e1e-3807-4ca6-8b47-e36e487f88f4-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.033281 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" event={"ID":"3ad63e1e-3807-4ca6-8b47-e36e487f88f4","Type":"ContainerDied","Data":"56fc60ec358f43dc04ce8ea8aa60d0e0a88b2bbfac19d6c2211d3a24b8ea8746"} Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.033662 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56fc60ec358f43dc04ce8ea8aa60d0e0a88b2bbfac19d6c2211d3a24b8ea8746" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.033348 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lxvxv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.109061 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-tp6gv"] Nov 25 16:35:39 crc kubenswrapper[4879]: E1125 16:35:39.109560 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad63e1e-3807-4ca6-8b47-e36e487f88f4" containerName="reboot-os-openstack-openstack-cell1" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.109577 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad63e1e-3807-4ca6-8b47-e36e487f88f4" containerName="reboot-os-openstack-openstack-cell1" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.109771 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad63e1e-3807-4ca6-8b47-e36e487f88f4" containerName="reboot-os-openstack-openstack-cell1" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.110663 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.112927 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.117554 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.118755 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.124442 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.158595 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-tp6gv"] Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.228966 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229236 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229513 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229581 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229626 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229823 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztf2m\" (UniqueName: 
\"kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.229939 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.230163 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.230212 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.230298 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.230318 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.230372 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332600 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztf2m\" (UniqueName: \"kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332664 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332736 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332762 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332817 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332843 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332882 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332949 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.332975 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.333038 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.333069 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.333098 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.349362 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.349717 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.354011 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.355024 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.356788 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.356902 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") 
" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.356983 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.357618 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.357834 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.360607 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.360674 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.361440 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztf2m\" (UniqueName: \"kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m\") pod \"install-certs-openstack-openstack-cell1-tp6gv\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.437662 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:39 crc kubenswrapper[4879]: W1125 16:35:39.968971 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc776aec9_eb9c_4b2f_b53c_0f0807213067.slice/crio-58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23 WatchSource:0}: Error finding container 58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23: Status 404 returned error can't find the container with id 58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23 Nov 25 16:35:39 crc kubenswrapper[4879]: I1125 16:35:39.973666 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-tp6gv"] Nov 25 16:35:40 crc kubenswrapper[4879]: I1125 16:35:40.048401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" event={"ID":"c776aec9-eb9c-4b2f-b53c-0f0807213067","Type":"ContainerStarted","Data":"58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23"} Nov 25 16:35:41 crc kubenswrapper[4879]: I1125 16:35:41.059847 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" event={"ID":"c776aec9-eb9c-4b2f-b53c-0f0807213067","Type":"ContainerStarted","Data":"d8bf74944b78b40510294a065f6c4a67b0e0f8d5643a8abffea012532a4d8743"} Nov 25 16:35:41 crc kubenswrapper[4879]: I1125 16:35:41.079369 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" podStartSLOduration=1.517085998 podStartE2EDuration="2.079347879s" podCreationTimestamp="2025-11-25 16:35:39 +0000 UTC" firstStartedPulling="2025-11-25 16:35:39.972636399 +0000 UTC m=+7831.576049470" lastFinishedPulling="2025-11-25 16:35:40.53489828 +0000 UTC m=+7832.138311351" observedRunningTime="2025-11-25 16:35:41.077922162 +0000 UTC m=+7832.681335243" watchObservedRunningTime="2025-11-25 16:35:41.079347879 +0000 UTC m=+7832.682760950" Nov 25 16:35:47 crc kubenswrapper[4879]: I1125 16:35:47.410151 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:35:47 crc kubenswrapper[4879]: I1125 16:35:47.410687 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:35:47 crc kubenswrapper[4879]: I1125 16:35:47.410733 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:35:47 crc kubenswrapper[4879]: I1125 16:35:47.411282 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:35:47 crc kubenswrapper[4879]: I1125 
16:35:47.411325 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" gracePeriod=600 Nov 25 16:35:47 crc kubenswrapper[4879]: E1125 16:35:47.546933 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:35:48 crc kubenswrapper[4879]: I1125 16:35:48.131372 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" exitCode=0 Nov 25 16:35:48 crc kubenswrapper[4879]: I1125 16:35:48.131430 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf"} Nov 25 16:35:48 crc kubenswrapper[4879]: I1125 16:35:48.131468 4879 scope.go:117] "RemoveContainer" containerID="33de6bef08f4f95d92cdea27ed2eb4d61c23cd00c5592f92856092979ab660a7" Nov 25 16:35:48 crc kubenswrapper[4879]: I1125 16:35:48.132444 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:35:48 crc kubenswrapper[4879]: E1125 16:35:48.132827 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:35:58 crc kubenswrapper[4879]: I1125 16:35:58.235547 4879 generic.go:334] "Generic (PLEG): container finished" podID="c776aec9-eb9c-4b2f-b53c-0f0807213067" containerID="d8bf74944b78b40510294a065f6c4a67b0e0f8d5643a8abffea012532a4d8743" exitCode=0 Nov 25 16:35:58 crc kubenswrapper[4879]: I1125 16:35:58.235710 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" event={"ID":"c776aec9-eb9c-4b2f-b53c-0f0807213067","Type":"ContainerDied","Data":"d8bf74944b78b40510294a065f6c4a67b0e0f8d5643a8abffea012532a4d8743"} Nov 25 16:35:58 crc kubenswrapper[4879]: I1125 16:35:58.645283 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:35:58 crc kubenswrapper[4879]: E1125 16:35:58.645773 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:35:59 
crc kubenswrapper[4879]: I1125 16:35:59.698311 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870471 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870532 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870551 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870601 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870663 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztf2m\" (UniqueName: \"kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870752 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870812 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870855 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870904 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: 
\"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870924 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870939 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.870973 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle\") pod \"c776aec9-eb9c-4b2f-b53c-0f0807213067\" (UID: \"c776aec9-eb9c-4b2f-b53c-0f0807213067\") " Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877546 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877589 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877742 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph" (OuterVolumeSpecName: "ceph") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877859 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877920 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.877938 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.878157 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.879170 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.879337 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.879554 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m" (OuterVolumeSpecName: "kube-api-access-ztf2m") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "kube-api-access-ztf2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.906147 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.908433 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory" (OuterVolumeSpecName: "inventory") pod "c776aec9-eb9c-4b2f-b53c-0f0807213067" (UID: "c776aec9-eb9c-4b2f-b53c-0f0807213067"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986497 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986526 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986537 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986547 4879 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986557 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztf2m\" (UniqueName: \"kubernetes.io/projected/c776aec9-eb9c-4b2f-b53c-0f0807213067-kube-api-access-ztf2m\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986565 4879 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986573 4879 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986583 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986591 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986600 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986608 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:35:59 crc kubenswrapper[4879]: I1125 16:35:59.986617 4879 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c776aec9-eb9c-4b2f-b53c-0f0807213067-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.259792 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" event={"ID":"c776aec9-eb9c-4b2f-b53c-0f0807213067","Type":"ContainerDied","Data":"58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23"} Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.259844 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-tp6gv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.259865 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58fd38682cc1a1b1df9997d97e5dc195998fc467fb6aee8d549f5079124fcc23" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.345952 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-dwnhv"] Nov 25 16:36:00 crc kubenswrapper[4879]: E1125 16:36:00.346527 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c776aec9-eb9c-4b2f-b53c-0f0807213067" containerName="install-certs-openstack-openstack-cell1" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.346554 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c776aec9-eb9c-4b2f-b53c-0f0807213067" containerName="install-certs-openstack-openstack-cell1" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.346820 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c776aec9-eb9c-4b2f-b53c-0f0807213067" containerName="install-certs-openstack-openstack-cell1" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.347628 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.349867 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.349931 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.356002 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.356099 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.362119 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-dwnhv"] Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.396347 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnvjj\" (UniqueName: \"kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.396475 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.396603 4879 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.396642 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.498261 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnvjj\" (UniqueName: \"kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.498349 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.498441 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.498474 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.502066 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.502066 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.502379 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " 
pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.514793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnvjj\" (UniqueName: \"kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj\") pod \"ceph-client-openstack-openstack-cell1-dwnhv\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:00 crc kubenswrapper[4879]: I1125 16:36:00.667915 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:01 crc kubenswrapper[4879]: I1125 16:36:01.186073 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-dwnhv"] Nov 25 16:36:01 crc kubenswrapper[4879]: I1125 16:36:01.271331 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" event={"ID":"a7410331-ac6a-440c-ae28-b480ff1cee46","Type":"ContainerStarted","Data":"29f07b48e5a12e1e3a3e706ecb5b629105ecc1f2dc2b9c8b487718c35a93af1d"} Nov 25 16:36:02 crc kubenswrapper[4879]: I1125 16:36:02.283112 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" event={"ID":"a7410331-ac6a-440c-ae28-b480ff1cee46","Type":"ContainerStarted","Data":"53574b8a6c24bdbe1c57d2b9a637fbc14acd301908188e13520d06819888770f"} Nov 25 16:36:02 crc kubenswrapper[4879]: I1125 16:36:02.299291 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" podStartSLOduration=1.83259769 podStartE2EDuration="2.299274664s" podCreationTimestamp="2025-11-25 16:36:00 +0000 UTC" firstStartedPulling="2025-11-25 16:36:01.197550403 +0000 UTC m=+7852.800963474" lastFinishedPulling="2025-11-25 16:36:01.664227377 +0000 UTC m=+7853.267640448" observedRunningTime="2025-11-25 16:36:02.299229603 +0000 UTC m=+7853.902642684" watchObservedRunningTime="2025-11-25 16:36:02.299274664 +0000 UTC m=+7853.902687735" Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.835110 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.838346 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.853066 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.938041 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcxxf\" (UniqueName: \"kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.938135 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:06 crc kubenswrapper[4879]: I1125 16:36:06.938374 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.041076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.041336 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcxxf\" (UniqueName: \"kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.041395 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.041602 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.041911 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.073801 4879 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vcxxf\" (UniqueName: \"kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf\") pod \"redhat-marketplace-hnlqp\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.171794 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.336358 4879 generic.go:334] "Generic (PLEG): container finished" podID="a7410331-ac6a-440c-ae28-b480ff1cee46" containerID="53574b8a6c24bdbe1c57d2b9a637fbc14acd301908188e13520d06819888770f" exitCode=0 Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.336560 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" event={"ID":"a7410331-ac6a-440c-ae28-b480ff1cee46","Type":"ContainerDied","Data":"53574b8a6c24bdbe1c57d2b9a637fbc14acd301908188e13520d06819888770f"} Nov 25 16:36:07 crc kubenswrapper[4879]: I1125 16:36:07.671353 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.348229 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerID="ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071" exitCode=0 Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.348491 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerDied","Data":"ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071"} Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.348608 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerStarted","Data":"cfb58385e7aafddeaed2b94210540e1c2546eab9c1dd60b9ac8ba21b206ec333"} Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.779765 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.884380 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnvjj\" (UniqueName: \"kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj\") pod \"a7410331-ac6a-440c-ae28-b480ff1cee46\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.884493 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph\") pod \"a7410331-ac6a-440c-ae28-b480ff1cee46\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.884855 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory\") pod \"a7410331-ac6a-440c-ae28-b480ff1cee46\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.884906 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key\") pod \"a7410331-ac6a-440c-ae28-b480ff1cee46\" (UID: \"a7410331-ac6a-440c-ae28-b480ff1cee46\") " Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.892219 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph" (OuterVolumeSpecName: "ceph") pod "a7410331-ac6a-440c-ae28-b480ff1cee46" (UID: "a7410331-ac6a-440c-ae28-b480ff1cee46"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.892678 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj" (OuterVolumeSpecName: "kube-api-access-rnvjj") pod "a7410331-ac6a-440c-ae28-b480ff1cee46" (UID: "a7410331-ac6a-440c-ae28-b480ff1cee46"). InnerVolumeSpecName "kube-api-access-rnvjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.920338 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory" (OuterVolumeSpecName: "inventory") pod "a7410331-ac6a-440c-ae28-b480ff1cee46" (UID: "a7410331-ac6a-440c-ae28-b480ff1cee46"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.922077 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7410331-ac6a-440c-ae28-b480ff1cee46" (UID: "a7410331-ac6a-440c-ae28-b480ff1cee46"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.988546 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.988594 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.988609 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnvjj\" (UniqueName: \"kubernetes.io/projected/a7410331-ac6a-440c-ae28-b480ff1cee46-kube-api-access-rnvjj\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:08 crc kubenswrapper[4879]: I1125 16:36:08.988630 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a7410331-ac6a-440c-ae28-b480ff1cee46-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.366853 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerStarted","Data":"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50"} Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.370022 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" event={"ID":"a7410331-ac6a-440c-ae28-b480ff1cee46","Type":"ContainerDied","Data":"29f07b48e5a12e1e3a3e706ecb5b629105ecc1f2dc2b9c8b487718c35a93af1d"} Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.370192 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29f07b48e5a12e1e3a3e706ecb5b629105ecc1f2dc2b9c8b487718c35a93af1d" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.370078 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-dwnhv" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.485459 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-d74xc"] Nov 25 16:36:09 crc kubenswrapper[4879]: E1125 16:36:09.486176 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7410331-ac6a-440c-ae28-b480ff1cee46" containerName="ceph-client-openstack-openstack-cell1" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.486205 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7410331-ac6a-440c-ae28-b480ff1cee46" containerName="ceph-client-openstack-openstack-cell1" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.486583 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7410331-ac6a-440c-ae28-b480ff1cee46" containerName="ceph-client-openstack-openstack-cell1" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.487982 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.491338 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.491680 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.491830 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.491976 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.492108 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.507419 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-d74xc"] Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.608475 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.608622 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.608675 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.608864 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2ds2\" (UniqueName: \"kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.608958 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.609101 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: 
\"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.645050 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:36:09 crc kubenswrapper[4879]: E1125 16:36:09.645623 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.710907 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2ds2\" (UniqueName: \"kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.711278 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.712934 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.713277 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.713476 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.713549 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.713636 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.715473 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:36:09 crc 
kubenswrapper[4879]: I1125 16:36:09.715805 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.719242 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.722479 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.724741 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.726906 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.727884 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.729490 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2ds2\" (UniqueName: \"kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2\") pod \"ovn-openstack-openstack-cell1-d74xc\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.816218 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:36:09 crc kubenswrapper[4879]: I1125 16:36:09.824732 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:36:10 crc kubenswrapper[4879]: I1125 16:36:10.383847 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerID="e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50" exitCode=0 Nov 25 16:36:10 crc kubenswrapper[4879]: I1125 16:36:10.383920 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerDied","Data":"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50"} Nov 25 16:36:10 crc kubenswrapper[4879]: I1125 16:36:10.384243 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-d74xc"] Nov 25 16:36:10 crc kubenswrapper[4879]: W1125 16:36:10.385092 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e8a543e_eaec_4564_82e0_39184cf58bab.slice/crio-cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443 WatchSource:0}: Error finding container cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443: Status 404 returned error can't find the container with id cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443 Nov 25 16:36:10 crc kubenswrapper[4879]: I1125 16:36:10.821297 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:36:11 crc kubenswrapper[4879]: I1125 16:36:11.398837 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-d74xc" event={"ID":"6e8a543e-eaec-4564-82e0-39184cf58bab","Type":"ContainerStarted","Data":"4729d6f0876dd28a6bbda6921e0bbad8b3de123cd6e99ec8e7127e224a183385"} Nov 25 16:36:11 crc kubenswrapper[4879]: I1125 16:36:11.399215 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-d74xc" event={"ID":"6e8a543e-eaec-4564-82e0-39184cf58bab","Type":"ContainerStarted","Data":"cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443"} Nov 25 16:36:11 crc kubenswrapper[4879]: I1125 16:36:11.401889 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerStarted","Data":"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57"} Nov 25 16:36:11 crc kubenswrapper[4879]: I1125 16:36:11.429359 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-d74xc" podStartSLOduration=1.9972092529999999 podStartE2EDuration="2.429326158s" podCreationTimestamp="2025-11-25 16:36:09 +0000 UTC" firstStartedPulling="2025-11-25 16:36:10.386829508 +0000 UTC m=+7861.990242579" lastFinishedPulling="2025-11-25 16:36:10.818946413 +0000 UTC m=+7862.422359484" observedRunningTime="2025-11-25 16:36:11.414167443 +0000 UTC m=+7863.017580524" watchObservedRunningTime="2025-11-25 16:36:11.429326158 +0000 UTC m=+7863.032739229" Nov 25 16:36:11 crc kubenswrapper[4879]: I1125 16:36:11.451393 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hnlqp" podStartSLOduration=3.03197986 podStartE2EDuration="5.45135733s" podCreationTimestamp="2025-11-25 16:36:06 +0000 UTC" firstStartedPulling="2025-11-25 16:36:08.35256718 +0000 UTC m=+7859.955980251" 
lastFinishedPulling="2025-11-25 16:36:10.77194465 +0000 UTC m=+7862.375357721" observedRunningTime="2025-11-25 16:36:11.43250887 +0000 UTC m=+7863.035921941" watchObservedRunningTime="2025-11-25 16:36:11.45135733 +0000 UTC m=+7863.054770421" Nov 25 16:36:17 crc kubenswrapper[4879]: I1125 16:36:17.173188 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:17 crc kubenswrapper[4879]: I1125 16:36:17.173717 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:17 crc kubenswrapper[4879]: I1125 16:36:17.230434 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:17 crc kubenswrapper[4879]: I1125 16:36:17.507100 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:17 crc kubenswrapper[4879]: I1125 16:36:17.557543 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:19 crc kubenswrapper[4879]: I1125 16:36:19.478367 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hnlqp" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="registry-server" containerID="cri-o://1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57" gracePeriod=2 Nov 25 16:36:19 crc kubenswrapper[4879]: I1125 16:36:19.967113 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.155398 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content\") pod \"d2c39331-e67f-4756-88ea-c2d9419007ef\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.156284 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcxxf\" (UniqueName: \"kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf\") pod \"d2c39331-e67f-4756-88ea-c2d9419007ef\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.156413 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities\") pod \"d2c39331-e67f-4756-88ea-c2d9419007ef\" (UID: \"d2c39331-e67f-4756-88ea-c2d9419007ef\") " Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.157169 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities" (OuterVolumeSpecName: "utilities") pod "d2c39331-e67f-4756-88ea-c2d9419007ef" (UID: "d2c39331-e67f-4756-88ea-c2d9419007ef"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.157829 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.162735 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf" (OuterVolumeSpecName: "kube-api-access-vcxxf") pod "d2c39331-e67f-4756-88ea-c2d9419007ef" (UID: "d2c39331-e67f-4756-88ea-c2d9419007ef"). InnerVolumeSpecName "kube-api-access-vcxxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.178561 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2c39331-e67f-4756-88ea-c2d9419007ef" (UID: "d2c39331-e67f-4756-88ea-c2d9419007ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.260313 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcxxf\" (UniqueName: \"kubernetes.io/projected/d2c39331-e67f-4756-88ea-c2d9419007ef-kube-api-access-vcxxf\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.260355 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2c39331-e67f-4756-88ea-c2d9419007ef-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.494187 4879 generic.go:334] "Generic (PLEG): container finished" podID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerID="1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57" exitCode=0 Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.494244 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerDied","Data":"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57"} Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.494280 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hnlqp" event={"ID":"d2c39331-e67f-4756-88ea-c2d9419007ef","Type":"ContainerDied","Data":"cfb58385e7aafddeaed2b94210540e1c2546eab9c1dd60b9ac8ba21b206ec333"} Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.494306 4879 scope.go:117] "RemoveContainer" containerID="1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.494556 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hnlqp" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.526233 4879 scope.go:117] "RemoveContainer" containerID="e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.533250 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.543466 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hnlqp"] Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.566523 4879 scope.go:117] "RemoveContainer" containerID="ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.598536 4879 scope.go:117] "RemoveContainer" containerID="1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57" Nov 25 16:36:20 crc kubenswrapper[4879]: E1125 16:36:20.599454 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57\": container with ID starting with 1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57 not found: ID does not exist" containerID="1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.599507 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57"} err="failed to get container status \"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57\": rpc error: code = NotFound desc = could not find container \"1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57\": container with ID starting with 1a9c25c3da8eec591b003770101285872c184808f98b6e2ae1afa491216cec57 not found: ID does not exist" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.599538 4879 scope.go:117] "RemoveContainer" containerID="e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50" Nov 25 16:36:20 crc kubenswrapper[4879]: E1125 16:36:20.600289 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50\": container with ID starting with e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50 not found: ID does not exist" containerID="e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.600338 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50"} err="failed to get container status \"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50\": rpc error: code = NotFound desc = could not find container \"e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50\": container with ID starting with e86e5bc741337149b7a556166b5edc4835d33bd49ee088821131dc368ce3da50 not found: ID does not exist" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.600365 4879 scope.go:117] "RemoveContainer" containerID="ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071" Nov 25 16:36:20 crc kubenswrapper[4879]: E1125 16:36:20.600830 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071\": container with ID starting with ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071 not found: ID does not exist" containerID="ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071" Nov 25 16:36:20 crc kubenswrapper[4879]: I1125 16:36:20.600856 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071"} err="failed to get container status \"ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071\": rpc error: code = NotFound desc = could not find container \"ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071\": container with ID starting with ec2775fb758df15e4c06210edb859669cfa8485ec73f1a3d0ca314220f02b071 not found: ID does not exist" Nov 25 16:36:21 crc kubenswrapper[4879]: I1125 16:36:21.658116 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" path="/var/lib/kubelet/pods/d2c39331-e67f-4756-88ea-c2d9419007ef/volumes" Nov 25 16:36:22 crc kubenswrapper[4879]: I1125 16:36:22.644706 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:36:22 crc kubenswrapper[4879]: E1125 16:36:22.645554 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:36:35 crc kubenswrapper[4879]: I1125 16:36:35.644496 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:36:35 crc kubenswrapper[4879]: E1125 16:36:35.646069 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.401592 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:37 crc kubenswrapper[4879]: E1125 16:36:37.402788 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="extract-utilities" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.402804 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="extract-utilities" Nov 25 16:36:37 crc kubenswrapper[4879]: E1125 16:36:37.402821 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="registry-server" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.402828 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="registry-server" Nov 25 16:36:37 crc kubenswrapper[4879]: E1125 
16:36:37.402847 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="extract-content" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.402854 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="extract-content" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.403087 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2c39331-e67f-4756-88ea-c2d9419007ef" containerName="registry-server" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.404816 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.416689 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.531472 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbk98\" (UniqueName: \"kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.531544 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.531763 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.633681 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbk98\" (UniqueName: \"kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.633743 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.633847 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.634377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.634386 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.659012 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbk98\" (UniqueName: \"kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98\") pod \"community-operators-tv77m\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:37 crc kubenswrapper[4879]: I1125 16:36:37.730450 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:38 crc kubenswrapper[4879]: I1125 16:36:38.288242 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:38 crc kubenswrapper[4879]: I1125 16:36:38.699078 4879 generic.go:334] "Generic (PLEG): container finished" podID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerID="648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495" exitCode=0 Nov 25 16:36:38 crc kubenswrapper[4879]: I1125 16:36:38.699171 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerDied","Data":"648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495"} Nov 25 16:36:38 crc kubenswrapper[4879]: I1125 16:36:38.699464 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerStarted","Data":"47860966289b142a171f59b19a28dfdaefef94810d9700d46e7a58487938b7fe"} Nov 25 16:36:39 crc kubenswrapper[4879]: I1125 16:36:39.711061 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerStarted","Data":"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a"} Nov 25 16:36:40 crc kubenswrapper[4879]: I1125 16:36:40.722698 4879 generic.go:334] "Generic (PLEG): container finished" podID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerID="4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a" exitCode=0 Nov 25 16:36:40 crc kubenswrapper[4879]: I1125 16:36:40.722763 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerDied","Data":"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a"} Nov 25 16:36:41 crc kubenswrapper[4879]: I1125 16:36:41.733966 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerStarted","Data":"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820"} Nov 25 16:36:41 crc kubenswrapper[4879]: I1125 
16:36:41.757918 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tv77m" podStartSLOduration=2.100359851 podStartE2EDuration="4.757899689s" podCreationTimestamp="2025-11-25 16:36:37 +0000 UTC" firstStartedPulling="2025-11-25 16:36:38.702170408 +0000 UTC m=+7890.305583479" lastFinishedPulling="2025-11-25 16:36:41.359710246 +0000 UTC m=+7892.963123317" observedRunningTime="2025-11-25 16:36:41.749850319 +0000 UTC m=+7893.353263390" watchObservedRunningTime="2025-11-25 16:36:41.757899689 +0000 UTC m=+7893.361312760" Nov 25 16:36:47 crc kubenswrapper[4879]: I1125 16:36:47.731542 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:47 crc kubenswrapper[4879]: I1125 16:36:47.732114 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:47 crc kubenswrapper[4879]: I1125 16:36:47.790617 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:47 crc kubenswrapper[4879]: I1125 16:36:47.846041 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:48 crc kubenswrapper[4879]: I1125 16:36:48.030317 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:48 crc kubenswrapper[4879]: I1125 16:36:48.644987 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:36:48 crc kubenswrapper[4879]: E1125 16:36:48.645673 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:36:49 crc kubenswrapper[4879]: I1125 16:36:49.822255 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tv77m" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="registry-server" containerID="cri-o://cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820" gracePeriod=2 Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.301437 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.428445 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities\") pod \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.428769 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbk98\" (UniqueName: \"kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98\") pod \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.428948 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content\") pod \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\" (UID: \"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d\") " Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.429521 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities" (OuterVolumeSpecName: "utilities") pod "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" (UID: "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.436563 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98" (OuterVolumeSpecName: "kube-api-access-fbk98") pod "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" (UID: "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d"). InnerVolumeSpecName "kube-api-access-fbk98". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.492437 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" (UID: "e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.531901 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.531948 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.531960 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbk98\" (UniqueName: \"kubernetes.io/projected/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d-kube-api-access-fbk98\") on node \"crc\" DevicePath \"\"" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.838513 4879 generic.go:334] "Generic (PLEG): container finished" podID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerID="cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820" exitCode=0 Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.838582 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerDied","Data":"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820"} Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.838713 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tv77m" event={"ID":"e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d","Type":"ContainerDied","Data":"47860966289b142a171f59b19a28dfdaefef94810d9700d46e7a58487938b7fe"} Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.838635 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tv77m" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.838744 4879 scope.go:117] "RemoveContainer" containerID="cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.879890 4879 scope.go:117] "RemoveContainer" containerID="4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.887695 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.897555 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tv77m"] Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.909077 4879 scope.go:117] "RemoveContainer" containerID="648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.960512 4879 scope.go:117] "RemoveContainer" containerID="cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820" Nov 25 16:36:50 crc kubenswrapper[4879]: E1125 16:36:50.961152 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820\": container with ID starting with cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820 not found: ID does not exist" containerID="cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.961197 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820"} err="failed to get container status \"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820\": rpc error: code = NotFound desc = could not find container \"cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820\": container with ID starting with cf0b35be4526cc97d38e7b51761d7b03a0d60d2f4a326486b7a321c7341b8820 not found: ID does not exist" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.961237 4879 scope.go:117] "RemoveContainer" containerID="4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a" Nov 25 16:36:50 crc kubenswrapper[4879]: E1125 16:36:50.961800 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a\": container with ID starting with 4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a not found: ID does not exist" containerID="4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.961839 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a"} err="failed to get container status \"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a\": rpc error: code = NotFound desc = could not find container \"4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a\": container with ID starting with 4d02e94103d71318630ffc355e4a34c59ae66b5e92cee3d7176020998308145a not found: ID does not exist" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.961865 4879 scope.go:117] "RemoveContainer" 
containerID="648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495" Nov 25 16:36:50 crc kubenswrapper[4879]: E1125 16:36:50.962672 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495\": container with ID starting with 648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495 not found: ID does not exist" containerID="648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495" Nov 25 16:36:50 crc kubenswrapper[4879]: I1125 16:36:50.962714 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495"} err="failed to get container status \"648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495\": rpc error: code = NotFound desc = could not find container \"648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495\": container with ID starting with 648c940e4b23301191e1b476167224fdf0fac783f63d6dfab449408797d5c495 not found: ID does not exist" Nov 25 16:36:51 crc kubenswrapper[4879]: I1125 16:36:51.658281 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" path="/var/lib/kubelet/pods/e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d/volumes" Nov 25 16:37:02 crc kubenswrapper[4879]: I1125 16:37:02.645370 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:37:02 crc kubenswrapper[4879]: E1125 16:37:02.646222 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:37:17 crc kubenswrapper[4879]: I1125 16:37:17.107742 4879 generic.go:334] "Generic (PLEG): container finished" podID="6e8a543e-eaec-4564-82e0-39184cf58bab" containerID="4729d6f0876dd28a6bbda6921e0bbad8b3de123cd6e99ec8e7127e224a183385" exitCode=0 Nov 25 16:37:17 crc kubenswrapper[4879]: I1125 16:37:17.107832 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-d74xc" event={"ID":"6e8a543e-eaec-4564-82e0-39184cf58bab","Type":"ContainerDied","Data":"4729d6f0876dd28a6bbda6921e0bbad8b3de123cd6e99ec8e7127e224a183385"} Nov 25 16:37:17 crc kubenswrapper[4879]: I1125 16:37:17.645369 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:37:17 crc kubenswrapper[4879]: E1125 16:37:17.645762 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.535026 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.634008 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.634394 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.634679 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.634794 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.634941 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.635077 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2ds2\" (UniqueName: \"kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2\") pod \"6e8a543e-eaec-4564-82e0-39184cf58bab\" (UID: \"6e8a543e-eaec-4564-82e0-39184cf58bab\") " Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.641196 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.641325 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph" (OuterVolumeSpecName: "ceph") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.641344 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2" (OuterVolumeSpecName: "kube-api-access-l2ds2") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "kube-api-access-l2ds2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.663948 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory" (OuterVolumeSpecName: "inventory") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.674318 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.675835 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "6e8a543e-eaec-4564-82e0-39184cf58bab" (UID: "6e8a543e-eaec-4564-82e0-39184cf58bab"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738201 4879 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738236 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738250 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738261 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e8a543e-eaec-4564-82e0-39184cf58bab-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738272 4879 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/6e8a543e-eaec-4564-82e0-39184cf58bab-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:18 crc kubenswrapper[4879]: I1125 16:37:18.738282 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2ds2\" (UniqueName: \"kubernetes.io/projected/6e8a543e-eaec-4564-82e0-39184cf58bab-kube-api-access-l2ds2\") on node \"crc\" DevicePath \"\"" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.126325 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-d74xc" event={"ID":"6e8a543e-eaec-4564-82e0-39184cf58bab","Type":"ContainerDied","Data":"cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443"} Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.126448 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cebf2ed6b1431ddfbb9911c42b3b9a4a5d3e579f957654afce3222bce2002443" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 
16:37:19.126393 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-d74xc" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.209694 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-sv8vm"] Nov 25 16:37:19 crc kubenswrapper[4879]: E1125 16:37:19.210242 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="extract-content" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210261 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="extract-content" Nov 25 16:37:19 crc kubenswrapper[4879]: E1125 16:37:19.210271 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="extract-utilities" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210277 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="extract-utilities" Nov 25 16:37:19 crc kubenswrapper[4879]: E1125 16:37:19.210293 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="registry-server" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210299 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="registry-server" Nov 25 16:37:19 crc kubenswrapper[4879]: E1125 16:37:19.210321 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e8a543e-eaec-4564-82e0-39184cf58bab" containerName="ovn-openstack-openstack-cell1" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210327 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e8a543e-eaec-4564-82e0-39184cf58bab" containerName="ovn-openstack-openstack-cell1" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210731 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e134e8c3-df6d-4cd9-aaf0-379d88ec3e9d" containerName="registry-server" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.210793 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e8a543e-eaec-4564-82e0-39184cf58bab" containerName="ovn-openstack-openstack-cell1" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.211617 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.214012 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.214252 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.214652 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.215780 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.215823 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.216400 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.223577 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-sv8vm"] Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351072 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351199 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351241 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351360 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351682 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " 
pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351722 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bshkw\" (UniqueName: \"kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.351811 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.453943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454005 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bshkw\" (UniqueName: \"kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454041 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454086 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454152 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454181 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key\") pod 
\"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.454217 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.459426 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.459469 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.459520 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.459901 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.460037 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.460329 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.469878 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bshkw\" (UniqueName: \"kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw\") pod \"neutron-metadata-openstack-openstack-cell1-sv8vm\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " 
pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:19 crc kubenswrapper[4879]: I1125 16:37:19.535322 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:37:20 crc kubenswrapper[4879]: I1125 16:37:20.041394 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:37:20 crc kubenswrapper[4879]: I1125 16:37:20.042751 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-sv8vm"] Nov 25 16:37:20 crc kubenswrapper[4879]: I1125 16:37:20.137607 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" event={"ID":"eec6cf16-d1ed-4bba-a189-d4ce39dda66c","Type":"ContainerStarted","Data":"b6c5e90f5dceeca62ca4fcd01a966d0df8f93475c50287ffc7e1602fdd7500f4"} Nov 25 16:37:21 crc kubenswrapper[4879]: I1125 16:37:21.149258 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" event={"ID":"eec6cf16-d1ed-4bba-a189-d4ce39dda66c","Type":"ContainerStarted","Data":"1993492b0a0619fe450c319baef2fea476ff7c62af977faa89439f96ec0051de"} Nov 25 16:37:32 crc kubenswrapper[4879]: I1125 16:37:32.644896 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:37:32 crc kubenswrapper[4879]: E1125 16:37:32.645736 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:37:45 crc kubenswrapper[4879]: I1125 16:37:45.645170 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:37:45 crc kubenswrapper[4879]: E1125 16:37:45.646101 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:37:57 crc kubenswrapper[4879]: I1125 16:37:57.645225 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:37:57 crc kubenswrapper[4879]: E1125 16:37:57.646099 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:38:10 crc kubenswrapper[4879]: I1125 16:38:10.644888 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:38:10 crc kubenswrapper[4879]: E1125 16:38:10.647277 4879 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:38:12 crc kubenswrapper[4879]: I1125 16:38:12.686952 4879 generic.go:334] "Generic (PLEG): container finished" podID="eec6cf16-d1ed-4bba-a189-d4ce39dda66c" containerID="1993492b0a0619fe450c319baef2fea476ff7c62af977faa89439f96ec0051de" exitCode=0 Nov 25 16:38:12 crc kubenswrapper[4879]: I1125 16:38:12.687054 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" event={"ID":"eec6cf16-d1ed-4bba-a189-d4ce39dda66c","Type":"ContainerDied","Data":"1993492b0a0619fe450c319baef2fea476ff7c62af977faa89439f96ec0051de"} Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.242592 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336642 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bshkw\" (UniqueName: \"kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336698 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336717 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336732 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336922 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336944 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.336959 4879 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key\") pod \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\" (UID: \"eec6cf16-d1ed-4bba-a189-d4ce39dda66c\") " Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.343304 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.343867 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph" (OuterVolumeSpecName: "ceph") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.350834 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw" (OuterVolumeSpecName: "kube-api-access-bshkw") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "kube-api-access-bshkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.369052 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.371816 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.374645 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory" (OuterVolumeSpecName: "inventory") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.377106 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "eec6cf16-d1ed-4bba-a189-d4ce39dda66c" (UID: "eec6cf16-d1ed-4bba-a189-d4ce39dda66c"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440234 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bshkw\" (UniqueName: \"kubernetes.io/projected/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-kube-api-access-bshkw\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440281 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440296 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440311 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440324 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440335 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.440347 4879 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/eec6cf16-d1ed-4bba-a189-d4ce39dda66c-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.707693 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" event={"ID":"eec6cf16-d1ed-4bba-a189-d4ce39dda66c","Type":"ContainerDied","Data":"b6c5e90f5dceeca62ca4fcd01a966d0df8f93475c50287ffc7e1602fdd7500f4"} Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.707749 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6c5e90f5dceeca62ca4fcd01a966d0df8f93475c50287ffc7e1602fdd7500f4" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.707761 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-sv8vm" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.801698 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-w86vc"] Nov 25 16:38:14 crc kubenswrapper[4879]: E1125 16:38:14.802272 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec6cf16-d1ed-4bba-a189-d4ce39dda66c" containerName="neutron-metadata-openstack-openstack-cell1" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.802291 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec6cf16-d1ed-4bba-a189-d4ce39dda66c" containerName="neutron-metadata-openstack-openstack-cell1" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.802514 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec6cf16-d1ed-4bba-a189-d4ce39dda66c" containerName="neutron-metadata-openstack-openstack-cell1" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.803302 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.805862 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.806049 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.806276 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.806415 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.806744 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.816409 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-w86vc"] Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.952363 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjz54\" (UniqueName: \"kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.952984 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.953048 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.953108 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.953262 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:14 crc kubenswrapper[4879]: I1125 16:38:14.953334 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.055792 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.055902 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjz54\" (UniqueName: \"kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.056051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.056083 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.056143 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.056219 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: 
\"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.060786 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.060801 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.064085 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.064490 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.077342 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.077715 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjz54\" (UniqueName: \"kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54\") pod \"libvirt-openstack-openstack-cell1-w86vc\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.147080 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:38:15 crc kubenswrapper[4879]: I1125 16:38:15.892641 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-w86vc"] Nov 25 16:38:15 crc kubenswrapper[4879]: W1125 16:38:15.894663 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d4df058_78cf_4287_997d_36533614641c.slice/crio-c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba WatchSource:0}: Error finding container c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba: Status 404 returned error can't find the container with id c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba Nov 25 16:38:16 crc kubenswrapper[4879]: I1125 16:38:16.726556 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" event={"ID":"6d4df058-78cf-4287-997d-36533614641c","Type":"ContainerStarted","Data":"95e939f994ca097275acd83fbee217df3ae664e56a1b87feebae13a3335a19c7"} Nov 25 16:38:16 crc kubenswrapper[4879]: I1125 16:38:16.726827 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" event={"ID":"6d4df058-78cf-4287-997d-36533614641c","Type":"ContainerStarted","Data":"c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba"} Nov 25 16:38:16 crc kubenswrapper[4879]: I1125 16:38:16.755360 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" podStartSLOduration=2.327059043 podStartE2EDuration="2.755339638s" podCreationTimestamp="2025-11-25 16:38:14 +0000 UTC" firstStartedPulling="2025-11-25 16:38:15.896971461 +0000 UTC m=+7987.500384532" lastFinishedPulling="2025-11-25 16:38:16.325252056 +0000 UTC m=+7987.928665127" observedRunningTime="2025-11-25 16:38:16.75425228 +0000 UTC m=+7988.357665361" watchObservedRunningTime="2025-11-25 16:38:16.755339638 +0000 UTC m=+7988.358752709" Nov 25 16:38:24 crc kubenswrapper[4879]: I1125 16:38:24.644820 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:38:24 crc kubenswrapper[4879]: E1125 16:38:24.645558 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.263248 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.274048 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.287079 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.315564 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nclkg\" (UniqueName: \"kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.316051 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.316094 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.418188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nclkg\" (UniqueName: \"kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.418270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.418299 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.418967 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.419105 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.440674 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nclkg\" (UniqueName: \"kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg\") pod \"redhat-operators-lxx4p\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:26 crc kubenswrapper[4879]: I1125 16:38:26.613515 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:27 crc kubenswrapper[4879]: I1125 16:38:27.226649 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:27 crc kubenswrapper[4879]: I1125 16:38:27.856231 4879 generic.go:334] "Generic (PLEG): container finished" podID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerID="bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c" exitCode=0 Nov 25 16:38:27 crc kubenswrapper[4879]: I1125 16:38:27.856331 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerDied","Data":"bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c"} Nov 25 16:38:27 crc kubenswrapper[4879]: I1125 16:38:27.857578 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerStarted","Data":"b2113fd97b01bf223d2ea9a64bec0f69a509191e6a71183a9b2b42ca4d3e990c"} Nov 25 16:38:29 crc kubenswrapper[4879]: I1125 16:38:29.883129 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerStarted","Data":"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889"} Nov 25 16:38:35 crc kubenswrapper[4879]: I1125 16:38:35.944107 4879 generic.go:334] "Generic (PLEG): container finished" podID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerID="c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889" exitCode=0 Nov 25 16:38:35 crc kubenswrapper[4879]: I1125 16:38:35.944164 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerDied","Data":"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889"} Nov 25 16:38:36 crc kubenswrapper[4879]: I1125 16:38:36.644427 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:38:36 crc kubenswrapper[4879]: E1125 16:38:36.645348 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:38:36 crc kubenswrapper[4879]: I1125 16:38:36.959298 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerStarted","Data":"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56"} Nov 25 16:38:36 crc kubenswrapper[4879]: I1125 16:38:36.981495 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-lxx4p" podStartSLOduration=2.348631794 podStartE2EDuration="10.98146811s" podCreationTimestamp="2025-11-25 16:38:26 +0000 UTC" firstStartedPulling="2025-11-25 16:38:27.86009068 +0000 UTC m=+7999.463503751" lastFinishedPulling="2025-11-25 16:38:36.492926996 +0000 UTC m=+8008.096340067" observedRunningTime="2025-11-25 16:38:36.977495766 +0000 UTC m=+8008.580908837" watchObservedRunningTime="2025-11-25 16:38:36.98146811 +0000 UTC m=+8008.584881201" Nov 25 16:38:46 crc kubenswrapper[4879]: I1125 16:38:46.614451 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:46 crc kubenswrapper[4879]: I1125 16:38:46.615022 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:46 crc kubenswrapper[4879]: I1125 16:38:46.673674 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:47 crc kubenswrapper[4879]: I1125 16:38:47.116162 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:47 crc kubenswrapper[4879]: I1125 16:38:47.169165 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:47 crc kubenswrapper[4879]: I1125 16:38:47.645518 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:38:47 crc kubenswrapper[4879]: E1125 16:38:47.646174 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.087244 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lxx4p" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="registry-server" containerID="cri-o://5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56" gracePeriod=2 Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.610277 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.649623 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nclkg\" (UniqueName: \"kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg\") pod \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.649762 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content\") pod \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.650027 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities\") pod \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\" (UID: \"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f\") " Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.650743 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities" (OuterVolumeSpecName: "utilities") pod "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" (UID: "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.656531 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg" (OuterVolumeSpecName: "kube-api-access-nclkg") pod "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" (UID: "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f"). InnerVolumeSpecName "kube-api-access-nclkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.754783 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.754820 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nclkg\" (UniqueName: \"kubernetes.io/projected/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-kube-api-access-nclkg\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.757640 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" (UID: "1839eb2b-6a3a-47aa-ae89-2e9e73cf875f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:38:49 crc kubenswrapper[4879]: I1125 16:38:49.856301 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.100209 4879 generic.go:334] "Generic (PLEG): container finished" podID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerID="5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56" exitCode=0 Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.100261 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerDied","Data":"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56"} Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.100292 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxx4p" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.100309 4879 scope.go:117] "RemoveContainer" containerID="5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.100297 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxx4p" event={"ID":"1839eb2b-6a3a-47aa-ae89-2e9e73cf875f","Type":"ContainerDied","Data":"b2113fd97b01bf223d2ea9a64bec0f69a509191e6a71183a9b2b42ca4d3e990c"} Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.130662 4879 scope.go:117] "RemoveContainer" containerID="c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.140204 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.153992 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lxx4p"] Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.163401 4879 scope.go:117] "RemoveContainer" containerID="bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.213801 4879 scope.go:117] "RemoveContainer" containerID="5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56" Nov 25 16:38:50 crc kubenswrapper[4879]: E1125 16:38:50.214595 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56\": container with ID starting with 5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56 not found: ID does not exist" containerID="5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.214647 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56"} err="failed to get container status \"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56\": rpc error: code = NotFound desc = could not find container \"5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56\": container with ID starting with 5a86eaff2b77a3a9d30bbedc65ff0449092881ad0dcb6cd70ede20c880babc56 not found: ID does not exist" Nov 25 16:38:50 crc 
kubenswrapper[4879]: I1125 16:38:50.214680 4879 scope.go:117] "RemoveContainer" containerID="c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889" Nov 25 16:38:50 crc kubenswrapper[4879]: E1125 16:38:50.215519 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889\": container with ID starting with c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889 not found: ID does not exist" containerID="c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.215590 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889"} err="failed to get container status \"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889\": rpc error: code = NotFound desc = could not find container \"c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889\": container with ID starting with c3ed8f53bb4f6db3b6a2e2e69ae505ac871a94e65268d0090f16a5badc1d5889 not found: ID does not exist" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.215627 4879 scope.go:117] "RemoveContainer" containerID="bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c" Nov 25 16:38:50 crc kubenswrapper[4879]: E1125 16:38:50.216814 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c\": container with ID starting with bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c not found: ID does not exist" containerID="bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c" Nov 25 16:38:50 crc kubenswrapper[4879]: I1125 16:38:50.216937 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c"} err="failed to get container status \"bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c\": rpc error: code = NotFound desc = could not find container \"bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c\": container with ID starting with bd2e1b31ca92277e44e3f67b01ed91eababccd1c29ba9878bc1f2ba87d21082c not found: ID does not exist" Nov 25 16:38:51 crc kubenswrapper[4879]: I1125 16:38:51.656085 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" path="/var/lib/kubelet/pods/1839eb2b-6a3a-47aa-ae89-2e9e73cf875f/volumes" Nov 25 16:38:59 crc kubenswrapper[4879]: I1125 16:38:59.652841 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:38:59 crc kubenswrapper[4879]: E1125 16:38:59.653578 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:39:13 crc kubenswrapper[4879]: I1125 16:39:13.644871 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" 
Nov 25 16:39:13 crc kubenswrapper[4879]: E1125 16:39:13.645777 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:39:28 crc kubenswrapper[4879]: I1125 16:39:28.644902 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:39:28 crc kubenswrapper[4879]: E1125 16:39:28.645673 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:39:39 crc kubenswrapper[4879]: I1125 16:39:39.653478 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:39:39 crc kubenswrapper[4879]: E1125 16:39:39.654256 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:39:54 crc kubenswrapper[4879]: I1125 16:39:54.645358 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:39:54 crc kubenswrapper[4879]: E1125 16:39:54.646160 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:40:07 crc kubenswrapper[4879]: I1125 16:40:07.645731 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:40:07 crc kubenswrapper[4879]: E1125 16:40:07.646563 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:40:18 crc kubenswrapper[4879]: I1125 16:40:18.649663 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:40:18 crc kubenswrapper[4879]: E1125 16:40:18.651101 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:40:31 crc kubenswrapper[4879]: I1125 16:40:31.645209 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:40:31 crc kubenswrapper[4879]: E1125 16:40:31.646007 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:40:42 crc kubenswrapper[4879]: I1125 16:40:42.644431 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:40:42 crc kubenswrapper[4879]: E1125 16:40:42.645243 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:40:56 crc kubenswrapper[4879]: I1125 16:40:56.644547 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:40:57 crc kubenswrapper[4879]: I1125 16:40:57.462745 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8"} Nov 25 16:42:49 crc kubenswrapper[4879]: I1125 16:42:49.630431 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d4df058-78cf-4287-997d-36533614641c" containerID="95e939f994ca097275acd83fbee217df3ae664e56a1b87feebae13a3335a19c7" exitCode=0 Nov 25 16:42:49 crc kubenswrapper[4879]: I1125 16:42:49.630523 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" event={"ID":"6d4df058-78cf-4287-997d-36533614641c","Type":"ContainerDied","Data":"95e939f994ca097275acd83fbee217df3ae664e56a1b87feebae13a3335a19c7"} Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.112100 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.208730 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.208857 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjz54\" (UniqueName: \"kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.209047 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.209134 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.209182 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.209223 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph\") pod \"6d4df058-78cf-4287-997d-36533614641c\" (UID: \"6d4df058-78cf-4287-997d-36533614641c\") " Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.216661 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.216902 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph" (OuterVolumeSpecName: "ceph") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.217713 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54" (OuterVolumeSpecName: "kube-api-access-pjz54") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "kube-api-access-pjz54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.240547 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.241618 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory" (OuterVolumeSpecName: "inventory") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.252098 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d4df058-78cf-4287-997d-36533614641c" (UID: "6d4df058-78cf-4287-997d-36533614641c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312608 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312917 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312929 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312942 4879 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312955 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjz54\" (UniqueName: \"kubernetes.io/projected/6d4df058-78cf-4287-997d-36533614641c-kube-api-access-pjz54\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.312968 4879 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d4df058-78cf-4287-997d-36533614641c-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.651551 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.660629 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-w86vc" event={"ID":"6d4df058-78cf-4287-997d-36533614641c","Type":"ContainerDied","Data":"c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba"} Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.660674 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c091193d9eb16af168e42c90f6a5a2b8eeffc6824631c63f43dc0cc98a0f09ba" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.753146 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sng5x"] Nov 25 16:42:51 crc kubenswrapper[4879]: E1125 16:42:51.753745 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="registry-server" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.753769 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="registry-server" Nov 25 16:42:51 crc kubenswrapper[4879]: E1125 16:42:51.753796 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d4df058-78cf-4287-997d-36533614641c" containerName="libvirt-openstack-openstack-cell1" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.753806 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d4df058-78cf-4287-997d-36533614641c" containerName="libvirt-openstack-openstack-cell1" Nov 25 16:42:51 crc kubenswrapper[4879]: E1125 16:42:51.753832 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="extract-utilities" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.753840 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="extract-utilities" Nov 25 16:42:51 crc kubenswrapper[4879]: E1125 16:42:51.753849 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="extract-content" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.753856 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="extract-content" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.754172 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="1839eb2b-6a3a-47aa-ae89-2e9e73cf875f" containerName="registry-server" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.754199 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d4df058-78cf-4287-997d-36533614641c" containerName="libvirt-openstack-openstack-cell1" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.755403 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.757580 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.757901 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.757903 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.758038 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.758136 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.758270 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.759752 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.765541 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sng5x"] Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.924711 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.924791 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.924843 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.924879 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.924967 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1\") 
pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925040 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925088 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925149 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg7dr\" (UniqueName: \"kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925225 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925292 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:51 crc kubenswrapper[4879]: I1125 16:42:51.925340 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.026956 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg7dr\" (UniqueName: \"kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" 
(UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027139 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027190 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027253 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027287 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027314 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027344 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027414 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027500 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.027543 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.028701 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.042809 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.044055 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.047813 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.051237 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.058646 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.060588 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.071899 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg7dr\" (UniqueName: 
\"kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.073628 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.074167 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.077731 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-sng5x\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.078622 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.709074 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:42:52 crc kubenswrapper[4879]: I1125 16:42:52.710071 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-sng5x"] Nov 25 16:42:53 crc kubenswrapper[4879]: I1125 16:42:53.671535 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" event={"ID":"f09196b3-7c17-4117-9733-77a97644d23c","Type":"ContainerStarted","Data":"81e8130393ed0124ce2b03a9d0786e0ccce2d48f8cd2d9494cc7e2bc56d073e7"} Nov 25 16:42:53 crc kubenswrapper[4879]: I1125 16:42:53.673371 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" event={"ID":"f09196b3-7c17-4117-9733-77a97644d23c","Type":"ContainerStarted","Data":"dff36971175c1f970d324ef956e4bd6da07477f78477cb37016c07cb8bf9ff0b"} Nov 25 16:42:53 crc kubenswrapper[4879]: I1125 16:42:53.694806 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" podStartSLOduration=2.276404508 podStartE2EDuration="2.694789295s" podCreationTimestamp="2025-11-25 16:42:51 +0000 UTC" firstStartedPulling="2025-11-25 16:42:52.708755804 +0000 UTC m=+8264.312168875" lastFinishedPulling="2025-11-25 16:42:53.127140591 +0000 UTC m=+8264.730553662" observedRunningTime="2025-11-25 16:42:53.690140573 +0000 UTC m=+8265.293553654" watchObservedRunningTime="2025-11-25 16:42:53.694789295 +0000 UTC m=+8265.298202366" Nov 25 16:43:17 crc kubenswrapper[4879]: I1125 16:43:17.408754 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:43:17 crc kubenswrapper[4879]: I1125 16:43:17.410285 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.257394 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.260446 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.274735 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.409264 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.409345 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.425643 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.425717 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99fl6\" (UniqueName: \"kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.426089 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.528615 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: 
I1125 16:43:47.528667 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99fl6\" (UniqueName: \"kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.528766 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.529195 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.529251 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.550689 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99fl6\" (UniqueName: \"kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6\") pod \"certified-operators-j924w\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:47 crc kubenswrapper[4879]: I1125 16:43:47.604912 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:48 crc kubenswrapper[4879]: I1125 16:43:48.207957 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:43:48 crc kubenswrapper[4879]: I1125 16:43:48.232488 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerStarted","Data":"246706dc32fcdf380a9ea225f0c947118f52076bdb0d39d25ba4470607473c23"} Nov 25 16:43:49 crc kubenswrapper[4879]: I1125 16:43:49.244795 4879 generic.go:334] "Generic (PLEG): container finished" podID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerID="de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a" exitCode=0 Nov 25 16:43:49 crc kubenswrapper[4879]: I1125 16:43:49.244922 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerDied","Data":"de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a"} Nov 25 16:43:51 crc kubenswrapper[4879]: I1125 16:43:51.262588 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerStarted","Data":"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d"} Nov 25 16:43:52 crc kubenswrapper[4879]: I1125 16:43:52.274243 4879 generic.go:334] "Generic (PLEG): container finished" podID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerID="cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d" exitCode=0 Nov 25 16:43:52 crc kubenswrapper[4879]: I1125 16:43:52.274287 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerDied","Data":"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d"} Nov 25 16:43:53 crc kubenswrapper[4879]: I1125 16:43:53.289504 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerStarted","Data":"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695"} Nov 25 16:43:53 crc kubenswrapper[4879]: I1125 16:43:53.308930 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j924w" podStartSLOduration=2.609271055 podStartE2EDuration="6.308912807s" podCreationTimestamp="2025-11-25 16:43:47 +0000 UTC" firstStartedPulling="2025-11-25 16:43:49.247063179 +0000 UTC m=+8320.850476250" lastFinishedPulling="2025-11-25 16:43:52.946704931 +0000 UTC m=+8324.550118002" observedRunningTime="2025-11-25 16:43:53.303417729 +0000 UTC m=+8324.906830820" watchObservedRunningTime="2025-11-25 16:43:53.308912807 +0000 UTC m=+8324.912325878" Nov 25 16:43:57 crc kubenswrapper[4879]: I1125 16:43:57.606378 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:57 crc kubenswrapper[4879]: I1125 16:43:57.607053 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:57 crc kubenswrapper[4879]: I1125 16:43:57.658338 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:58 crc kubenswrapper[4879]: I1125 16:43:58.382498 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:43:59 crc kubenswrapper[4879]: I1125 16:43:59.441894 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:44:00 crc kubenswrapper[4879]: I1125 16:44:00.352572 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j924w" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="registry-server" containerID="cri-o://fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695" gracePeriod=2 Nov 25 16:44:00 crc kubenswrapper[4879]: I1125 16:44:00.832051 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.009329 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99fl6\" (UniqueName: \"kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6\") pod \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.009721 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities\") pod \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.009865 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content\") pod \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\" (UID: \"7cba849d-aeb2-45a7-a6bb-12a38e0afff2\") " Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.010398 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities" (OuterVolumeSpecName: "utilities") pod "7cba849d-aeb2-45a7-a6bb-12a38e0afff2" (UID: "7cba849d-aeb2-45a7-a6bb-12a38e0afff2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.015500 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6" (OuterVolumeSpecName: "kube-api-access-99fl6") pod "7cba849d-aeb2-45a7-a6bb-12a38e0afff2" (UID: "7cba849d-aeb2-45a7-a6bb-12a38e0afff2"). InnerVolumeSpecName "kube-api-access-99fl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.122591 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.122643 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99fl6\" (UniqueName: \"kubernetes.io/projected/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-kube-api-access-99fl6\") on node \"crc\" DevicePath \"\"" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.364897 4879 generic.go:334] "Generic (PLEG): container finished" podID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerID="fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695" exitCode=0 Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.364952 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerDied","Data":"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695"} Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.364984 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j924w" event={"ID":"7cba849d-aeb2-45a7-a6bb-12a38e0afff2","Type":"ContainerDied","Data":"246706dc32fcdf380a9ea225f0c947118f52076bdb0d39d25ba4470607473c23"} Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.365006 4879 scope.go:117] "RemoveContainer" containerID="fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.365231 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j924w" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.402150 4879 scope.go:117] "RemoveContainer" containerID="cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.431377 4879 scope.go:117] "RemoveContainer" containerID="de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.475249 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7cba849d-aeb2-45a7-a6bb-12a38e0afff2" (UID: "7cba849d-aeb2-45a7-a6bb-12a38e0afff2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.495741 4879 scope.go:117] "RemoveContainer" containerID="fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695" Nov 25 16:44:01 crc kubenswrapper[4879]: E1125 16:44:01.496202 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695\": container with ID starting with fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695 not found: ID does not exist" containerID="fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.496332 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695"} err="failed to get container status \"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695\": rpc error: code = NotFound desc = could not find container \"fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695\": container with ID starting with fe4ec3780f90c57d3291ae7594035229dd20baa4f4ea9df21a0e0135129ee695 not found: ID does not exist" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.496411 4879 scope.go:117] "RemoveContainer" containerID="cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d" Nov 25 16:44:01 crc kubenswrapper[4879]: E1125 16:44:01.496970 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d\": container with ID starting with cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d not found: ID does not exist" containerID="cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.497114 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d"} err="failed to get container status \"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d\": rpc error: code = NotFound desc = could not find container \"cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d\": container with ID starting with cca9793a5c305a75bf043fd93a388d8e066f15f6a68108b9948dd3e9ed990b4d not found: ID does not exist" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.497211 4879 scope.go:117] "RemoveContainer" containerID="de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a" Nov 25 16:44:01 crc kubenswrapper[4879]: E1125 16:44:01.497702 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a\": container with ID starting with de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a not found: ID does not exist" containerID="de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.497722 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a"} err="failed to get container status \"de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a\": rpc error: code = NotFound desc = could not 
find container \"de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a\": container with ID starting with de6a3e19bd7494e5ac74a41b8a3fa45a4a087c1d6726b43a15b8cdd17a2e1f8a not found: ID does not exist" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.535273 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cba849d-aeb2-45a7-a6bb-12a38e0afff2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.696887 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:44:01 crc kubenswrapper[4879]: I1125 16:44:01.706411 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j924w"] Nov 25 16:44:03 crc kubenswrapper[4879]: I1125 16:44:03.656507 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" path="/var/lib/kubelet/pods/7cba849d-aeb2-45a7-a6bb-12a38e0afff2/volumes" Nov 25 16:44:17 crc kubenswrapper[4879]: I1125 16:44:17.409002 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:44:17 crc kubenswrapper[4879]: I1125 16:44:17.410330 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:44:17 crc kubenswrapper[4879]: I1125 16:44:17.410403 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:44:17 crc kubenswrapper[4879]: I1125 16:44:17.411239 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:44:17 crc kubenswrapper[4879]: I1125 16:44:17.411296 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8" gracePeriod=600 Nov 25 16:44:18 crc kubenswrapper[4879]: I1125 16:44:18.535557 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8" exitCode=0 Nov 25 16:44:18 crc kubenswrapper[4879]: I1125 16:44:18.535661 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8"} Nov 25 16:44:18 crc kubenswrapper[4879]: I1125 16:44:18.536271 4879 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0"} Nov 25 16:44:18 crc kubenswrapper[4879]: I1125 16:44:18.536305 4879 scope.go:117] "RemoveContainer" containerID="4025b779574f8e8ef41a7b40b1ab37e9ae5ae500c29ba95fb2a24edff842fbbf" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.149820 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262"] Nov 25 16:45:00 crc kubenswrapper[4879]: E1125 16:45:00.150870 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="extract-content" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.150889 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="extract-content" Nov 25 16:45:00 crc kubenswrapper[4879]: E1125 16:45:00.150915 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="extract-utilities" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.150922 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="extract-utilities" Nov 25 16:45:00 crc kubenswrapper[4879]: E1125 16:45:00.150943 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="registry-server" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.150951 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="registry-server" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.151218 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cba849d-aeb2-45a7-a6bb-12a38e0afff2" containerName="registry-server" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.152186 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.154059 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.154364 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.168830 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262"] Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.316784 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.316929 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.316978 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft287\" (UniqueName: \"kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.418345 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.418422 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft287\" (UniqueName: \"kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.418486 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.419662 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume\") pod 
\"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.424527 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.433705 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft287\" (UniqueName: \"kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287\") pod \"collect-profiles-29401485-hf262\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.473906 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:00 crc kubenswrapper[4879]: I1125 16:45:00.891731 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262"] Nov 25 16:45:02 crc kubenswrapper[4879]: I1125 16:45:02.013580 4879 generic.go:334] "Generic (PLEG): container finished" podID="77b400bf-d905-4a68-8ef6-330494090d74" containerID="1ee2d4d90aaa82ea71fdc6cb586f7f1069e1c882f5c38504b52303dfff35d67f" exitCode=0 Nov 25 16:45:02 crc kubenswrapper[4879]: I1125 16:45:02.013651 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" event={"ID":"77b400bf-d905-4a68-8ef6-330494090d74","Type":"ContainerDied","Data":"1ee2d4d90aaa82ea71fdc6cb586f7f1069e1c882f5c38504b52303dfff35d67f"} Nov 25 16:45:02 crc kubenswrapper[4879]: I1125 16:45:02.014116 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" event={"ID":"77b400bf-d905-4a68-8ef6-330494090d74","Type":"ContainerStarted","Data":"6d505df81a00fa7bc2334ed7b9e582422a52901a62bd0198a097093d1f508522"} Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.396447 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.479548 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft287\" (UniqueName: \"kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287\") pod \"77b400bf-d905-4a68-8ef6-330494090d74\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.479782 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume\") pod \"77b400bf-d905-4a68-8ef6-330494090d74\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.479973 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume\") pod \"77b400bf-d905-4a68-8ef6-330494090d74\" (UID: \"77b400bf-d905-4a68-8ef6-330494090d74\") " Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.480312 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume" (OuterVolumeSpecName: "config-volume") pod "77b400bf-d905-4a68-8ef6-330494090d74" (UID: "77b400bf-d905-4a68-8ef6-330494090d74"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.480482 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77b400bf-d905-4a68-8ef6-330494090d74-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.484350 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287" (OuterVolumeSpecName: "kube-api-access-ft287") pod "77b400bf-d905-4a68-8ef6-330494090d74" (UID: "77b400bf-d905-4a68-8ef6-330494090d74"). InnerVolumeSpecName "kube-api-access-ft287". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.485634 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "77b400bf-d905-4a68-8ef6-330494090d74" (UID: "77b400bf-d905-4a68-8ef6-330494090d74"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.582500 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77b400bf-d905-4a68-8ef6-330494090d74-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:03 crc kubenswrapper[4879]: I1125 16:45:03.582536 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft287\" (UniqueName: \"kubernetes.io/projected/77b400bf-d905-4a68-8ef6-330494090d74-kube-api-access-ft287\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:04 crc kubenswrapper[4879]: I1125 16:45:04.036805 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" event={"ID":"77b400bf-d905-4a68-8ef6-330494090d74","Type":"ContainerDied","Data":"6d505df81a00fa7bc2334ed7b9e582422a52901a62bd0198a097093d1f508522"} Nov 25 16:45:04 crc kubenswrapper[4879]: I1125 16:45:04.036869 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d505df81a00fa7bc2334ed7b9e582422a52901a62bd0198a097093d1f508522" Nov 25 16:45:04 crc kubenswrapper[4879]: I1125 16:45:04.036943 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401485-hf262" Nov 25 16:45:04 crc kubenswrapper[4879]: I1125 16:45:04.462722 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4"] Nov 25 16:45:04 crc kubenswrapper[4879]: I1125 16:45:04.472379 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401440-j5ch4"] Nov 25 16:45:05 crc kubenswrapper[4879]: I1125 16:45:05.661219 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e3d9297-b45d-43a2-8169-4b5b4b7965dc" path="/var/lib/kubelet/pods/5e3d9297-b45d-43a2-8169-4b5b4b7965dc/volumes" Nov 25 16:45:55 crc kubenswrapper[4879]: I1125 16:45:55.575049 4879 generic.go:334] "Generic (PLEG): container finished" podID="f09196b3-7c17-4117-9733-77a97644d23c" containerID="81e8130393ed0124ce2b03a9d0786e0ccce2d48f8cd2d9494cc7e2bc56d073e7" exitCode=0 Nov 25 16:45:55 crc kubenswrapper[4879]: I1125 16:45:55.575246 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" event={"ID":"f09196b3-7c17-4117-9733-77a97644d23c","Type":"ContainerDied","Data":"81e8130393ed0124ce2b03a9d0786e0ccce2d48f8cd2d9494cc7e2bc56d073e7"} Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.040396 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157264 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157409 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157463 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157495 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157564 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157610 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157644 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157682 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wg7dr\" (UniqueName: \"kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157776 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157840 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.157914 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle\") pod \"f09196b3-7c17-4117-9733-77a97644d23c\" (UID: \"f09196b3-7c17-4117-9733-77a97644d23c\") " Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.165452 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph" (OuterVolumeSpecName: "ceph") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.178355 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.178994 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr" (OuterVolumeSpecName: "kube-api-access-wg7dr") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "kube-api-access-wg7dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.187330 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.188407 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.189511 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory" (OuterVolumeSpecName: "inventory") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.190593 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.191602 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.194858 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.195252 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.201826 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f09196b3-7c17-4117-9733-77a97644d23c" (UID: "f09196b3-7c17-4117-9733-77a97644d23c"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.260608 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.260827 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.260906 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.260977 4879 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261081 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261260 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261335 4879 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261404 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/f09196b3-7c17-4117-9733-77a97644d23c-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261475 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261546 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wg7dr\" (UniqueName: \"kubernetes.io/projected/f09196b3-7c17-4117-9733-77a97644d23c-kube-api-access-wg7dr\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.261621 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f09196b3-7c17-4117-9733-77a97644d23c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.597764 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" event={"ID":"f09196b3-7c17-4117-9733-77a97644d23c","Type":"ContainerDied","Data":"dff36971175c1f970d324ef956e4bd6da07477f78477cb37016c07cb8bf9ff0b"} Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.597790 4879 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-sng5x" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.597804 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dff36971175c1f970d324ef956e4bd6da07477f78477cb37016c07cb8bf9ff0b" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.699984 4879 scope.go:117] "RemoveContainer" containerID="ab1e87d9f02145d94806aa3e63257cf261ef051cf745e513afafd69ba4af1a9d" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.702357 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-56m2z"] Nov 25 16:45:57 crc kubenswrapper[4879]: E1125 16:45:57.702820 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77b400bf-d905-4a68-8ef6-330494090d74" containerName="collect-profiles" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.702838 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="77b400bf-d905-4a68-8ef6-330494090d74" containerName="collect-profiles" Nov 25 16:45:57 crc kubenswrapper[4879]: E1125 16:45:57.702857 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09196b3-7c17-4117-9733-77a97644d23c" containerName="nova-cell1-openstack-openstack-cell1" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.702864 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09196b3-7c17-4117-9733-77a97644d23c" containerName="nova-cell1-openstack-openstack-cell1" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.703074 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="77b400bf-d905-4a68-8ef6-330494090d74" containerName="collect-profiles" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.703092 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f09196b3-7c17-4117-9733-77a97644d23c" containerName="nova-cell1-openstack-openstack-cell1" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.703843 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.709356 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.709515 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.709752 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.710424 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.710452 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.775674 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-56m2z"] Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877178 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877250 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877296 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877343 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877393 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877414 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877444 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.877497 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx2x4\" (UniqueName: \"kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.978641 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.978683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.978732 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.978819 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx2x4\" (UniqueName: \"kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.979556 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.979739 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1\") pod 
\"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.979926 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.980073 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.982954 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.983652 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.983730 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.983758 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.983812 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.985195 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.985503 4879 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:57 crc kubenswrapper[4879]: I1125 16:45:57.998686 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx2x4\" (UniqueName: \"kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4\") pod \"telemetry-openstack-openstack-cell1-56m2z\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:58 crc kubenswrapper[4879]: I1125 16:45:58.084348 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:45:58 crc kubenswrapper[4879]: I1125 16:45:58.642723 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-56m2z"] Nov 25 16:45:59 crc kubenswrapper[4879]: I1125 16:45:59.623163 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" event={"ID":"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0","Type":"ContainerStarted","Data":"b8713163903076b9d1eead70f705c98ea0413771dfcaa31dc7aa45829fda36c3"} Nov 25 16:46:00 crc kubenswrapper[4879]: I1125 16:46:00.637245 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" event={"ID":"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0","Type":"ContainerStarted","Data":"8816ed58c004aea166eeeee08be0d58156253ee8500c1ac8ac64f41b5aa3871b"} Nov 25 16:46:00 crc kubenswrapper[4879]: I1125 16:46:00.672491 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" podStartSLOduration=2.989546677 podStartE2EDuration="3.672471868s" podCreationTimestamp="2025-11-25 16:45:57 +0000 UTC" firstStartedPulling="2025-11-25 16:45:58.647258304 +0000 UTC m=+8450.250671365" lastFinishedPulling="2025-11-25 16:45:59.330183485 +0000 UTC m=+8450.933596556" observedRunningTime="2025-11-25 16:46:00.661158783 +0000 UTC m=+8452.264571854" watchObservedRunningTime="2025-11-25 16:46:00.672471868 +0000 UTC m=+8452.275884939" Nov 25 16:46:17 crc kubenswrapper[4879]: I1125 16:46:17.409233 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:46:17 crc kubenswrapper[4879]: I1125 16:46:17.409805 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:46:47 crc kubenswrapper[4879]: I1125 16:46:47.409017 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Nov 25 16:46:47 crc kubenswrapper[4879]: I1125 16:46:47.409509 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:47:17 crc kubenswrapper[4879]: I1125 16:47:17.408639 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:47:17 crc kubenswrapper[4879]: I1125 16:47:17.409639 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:47:17 crc kubenswrapper[4879]: I1125 16:47:17.409727 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:47:17 crc kubenswrapper[4879]: I1125 16:47:17.411291 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:47:17 crc kubenswrapper[4879]: I1125 16:47:17.411383 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" gracePeriod=600 Nov 25 16:47:17 crc kubenswrapper[4879]: E1125 16:47:17.541880 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:47:18 crc kubenswrapper[4879]: I1125 16:47:18.482517 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" exitCode=0 Nov 25 16:47:18 crc kubenswrapper[4879]: I1125 16:47:18.483838 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0"} Nov 25 16:47:18 crc kubenswrapper[4879]: I1125 16:47:18.483959 4879 scope.go:117] "RemoveContainer" containerID="b0bce0d1924c1d2381c525cd961767e65bda3be3231dcaf504e340ee185ec7b8" Nov 25 16:47:18 crc kubenswrapper[4879]: I1125 16:47:18.485241 4879 scope.go:117] "RemoveContainer" 
containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:47:18 crc kubenswrapper[4879]: E1125 16:47:18.485641 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:47:32 crc kubenswrapper[4879]: I1125 16:47:32.645222 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:47:32 crc kubenswrapper[4879]: E1125 16:47:32.645995 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:47:38 crc kubenswrapper[4879]: I1125 16:47:38.943335 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:38 crc kubenswrapper[4879]: I1125 16:47:38.946912 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:38 crc kubenswrapper[4879]: I1125 16:47:38.962740 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.075539 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swvwf\" (UniqueName: \"kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.075889 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.076198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.179217 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.179377 4879 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-swvwf\" (UniqueName: \"kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.179533 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.180011 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.180049 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.206512 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swvwf\" (UniqueName: \"kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf\") pod \"community-operators-vh6h9\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.276783 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:39 crc kubenswrapper[4879]: I1125 16:47:39.819457 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:40 crc kubenswrapper[4879]: I1125 16:47:40.733655 4879 generic.go:334] "Generic (PLEG): container finished" podID="47c1157a-779b-42cc-8afc-76759c23c65b" containerID="8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b" exitCode=0 Nov 25 16:47:40 crc kubenswrapper[4879]: I1125 16:47:40.733846 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerDied","Data":"8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b"} Nov 25 16:47:40 crc kubenswrapper[4879]: I1125 16:47:40.733945 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerStarted","Data":"970964bf97b98bde8ec58cd72affe8404354f2877155780eda448b0dbf42a342"} Nov 25 16:47:41 crc kubenswrapper[4879]: I1125 16:47:41.745525 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerStarted","Data":"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65"} Nov 25 16:47:43 crc kubenswrapper[4879]: I1125 16:47:43.767831 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerDied","Data":"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65"} Nov 25 16:47:43 crc kubenswrapper[4879]: I1125 16:47:43.767765 4879 generic.go:334] "Generic (PLEG): container finished" podID="47c1157a-779b-42cc-8afc-76759c23c65b" containerID="4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65" exitCode=0 Nov 25 16:47:44 crc kubenswrapper[4879]: I1125 16:47:44.645294 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:47:44 crc kubenswrapper[4879]: E1125 16:47:44.645952 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:47:44 crc kubenswrapper[4879]: I1125 16:47:44.781976 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerStarted","Data":"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e"} Nov 25 16:47:44 crc kubenswrapper[4879]: I1125 16:47:44.809044 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vh6h9" podStartSLOduration=3.354248355 podStartE2EDuration="6.809022656s" podCreationTimestamp="2025-11-25 16:47:38 +0000 UTC" firstStartedPulling="2025-11-25 16:47:40.735883604 +0000 UTC m=+8552.339296675" lastFinishedPulling="2025-11-25 16:47:44.190657885 +0000 UTC m=+8555.794070976" observedRunningTime="2025-11-25 
16:47:44.802747768 +0000 UTC m=+8556.406160859" watchObservedRunningTime="2025-11-25 16:47:44.809022656 +0000 UTC m=+8556.412435727" Nov 25 16:47:49 crc kubenswrapper[4879]: I1125 16:47:49.277048 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:49 crc kubenswrapper[4879]: I1125 16:47:49.277588 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:49 crc kubenswrapper[4879]: I1125 16:47:49.326647 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:49 crc kubenswrapper[4879]: I1125 16:47:49.875911 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:49 crc kubenswrapper[4879]: I1125 16:47:49.922735 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:51 crc kubenswrapper[4879]: I1125 16:47:51.849885 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vh6h9" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="registry-server" containerID="cri-o://2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e" gracePeriod=2 Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.817963 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.880848 4879 generic.go:334] "Generic (PLEG): container finished" podID="47c1157a-779b-42cc-8afc-76759c23c65b" containerID="2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e" exitCode=0 Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.880903 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerDied","Data":"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e"} Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.880930 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vh6h9" event={"ID":"47c1157a-779b-42cc-8afc-76759c23c65b","Type":"ContainerDied","Data":"970964bf97b98bde8ec58cd72affe8404354f2877155780eda448b0dbf42a342"} Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.880949 4879 scope.go:117] "RemoveContainer" containerID="2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.882604 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vh6h9" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.912593 4879 scope.go:117] "RemoveContainer" containerID="4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.933216 4879 scope.go:117] "RemoveContainer" containerID="8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.978892 4879 scope.go:117] "RemoveContainer" containerID="2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e" Nov 25 16:47:52 crc kubenswrapper[4879]: E1125 16:47:52.979289 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e\": container with ID starting with 2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e not found: ID does not exist" containerID="2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.979336 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e"} err="failed to get container status \"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e\": rpc error: code = NotFound desc = could not find container \"2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e\": container with ID starting with 2c4691c1e441c16ab6a577322e3b9676e14baab3b3423751a7160d3ddf437a5e not found: ID does not exist" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.979362 4879 scope.go:117] "RemoveContainer" containerID="4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65" Nov 25 16:47:52 crc kubenswrapper[4879]: E1125 16:47:52.979645 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65\": container with ID starting with 4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65 not found: ID does not exist" containerID="4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.979677 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65"} err="failed to get container status \"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65\": rpc error: code = NotFound desc = could not find container \"4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65\": container with ID starting with 4411a01e041f5b2ea31317d92c59bade8a58c0e5e34119356d0ee4de4a3c3b65 not found: ID does not exist" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.979699 4879 scope.go:117] "RemoveContainer" containerID="8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b" Nov 25 16:47:52 crc kubenswrapper[4879]: E1125 16:47:52.979992 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b\": container with ID starting with 8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b not found: ID does not exist" containerID="8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b" 
Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.980040 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b"} err="failed to get container status \"8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b\": rpc error: code = NotFound desc = could not find container \"8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b\": container with ID starting with 8c3039710ef4a7c6817381aa0d991adcb7cde3b4f566908ba4a6fb3a0c60825b not found: ID does not exist" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.981280 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities\") pod \"47c1157a-779b-42cc-8afc-76759c23c65b\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.981385 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content\") pod \"47c1157a-779b-42cc-8afc-76759c23c65b\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.981537 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swvwf\" (UniqueName: \"kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf\") pod \"47c1157a-779b-42cc-8afc-76759c23c65b\" (UID: \"47c1157a-779b-42cc-8afc-76759c23c65b\") " Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.982017 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities" (OuterVolumeSpecName: "utilities") pod "47c1157a-779b-42cc-8afc-76759c23c65b" (UID: "47c1157a-779b-42cc-8afc-76759c23c65b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:47:52 crc kubenswrapper[4879]: I1125 16:47:52.986467 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf" (OuterVolumeSpecName: "kube-api-access-swvwf") pod "47c1157a-779b-42cc-8afc-76759c23c65b" (UID: "47c1157a-779b-42cc-8afc-76759c23c65b"). InnerVolumeSpecName "kube-api-access-swvwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.028045 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47c1157a-779b-42cc-8afc-76759c23c65b" (UID: "47c1157a-779b-42cc-8afc-76759c23c65b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.084218 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.084259 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swvwf\" (UniqueName: \"kubernetes.io/projected/47c1157a-779b-42cc-8afc-76759c23c65b-kube-api-access-swvwf\") on node \"crc\" DevicePath \"\"" Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.084272 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c1157a-779b-42cc-8afc-76759c23c65b-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.231875 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.241675 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vh6h9"] Nov 25 16:47:53 crc kubenswrapper[4879]: I1125 16:47:53.658184 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" path="/var/lib/kubelet/pods/47c1157a-779b-42cc-8afc-76759c23c65b/volumes" Nov 25 16:47:55 crc kubenswrapper[4879]: I1125 16:47:55.644268 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:47:55 crc kubenswrapper[4879]: E1125 16:47:55.644984 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:48:09 crc kubenswrapper[4879]: I1125 16:48:09.654738 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:48:09 crc kubenswrapper[4879]: E1125 16:48:09.656508 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:48:20 crc kubenswrapper[4879]: I1125 16:48:20.645635 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:48:20 crc kubenswrapper[4879]: E1125 16:48:20.646478 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:48:32 crc kubenswrapper[4879]: I1125 16:48:32.644935 4879 
scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:48:32 crc kubenswrapper[4879]: E1125 16:48:32.645913 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:48:46 crc kubenswrapper[4879]: I1125 16:48:46.645031 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:48:46 crc kubenswrapper[4879]: E1125 16:48:46.645727 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:48:59 crc kubenswrapper[4879]: I1125 16:48:59.645266 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:48:59 crc kubenswrapper[4879]: E1125 16:48:59.646411 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:49:10 crc kubenswrapper[4879]: I1125 16:49:10.645435 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:49:10 crc kubenswrapper[4879]: E1125 16:49:10.646342 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:49:24 crc kubenswrapper[4879]: I1125 16:49:24.645543 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:49:24 crc kubenswrapper[4879]: E1125 16:49:24.646624 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:49:35 crc kubenswrapper[4879]: I1125 16:49:35.645089 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:49:35 crc kubenswrapper[4879]: E1125 16:49:35.646499 4879 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.609370 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:49:44 crc kubenswrapper[4879]: E1125 16:49:44.610457 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="extract-utilities" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.610495 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="extract-utilities" Nov 25 16:49:44 crc kubenswrapper[4879]: E1125 16:49:44.610519 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="extract-content" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.610528 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="extract-content" Nov 25 16:49:44 crc kubenswrapper[4879]: E1125 16:49:44.610565 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="registry-server" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.610574 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="registry-server" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.610896 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c1157a-779b-42cc-8afc-76759c23c65b" containerName="registry-server" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.612648 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.626751 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.711337 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.711890 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5gp4\" (UniqueName: \"kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.712319 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.813896 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5gp4\" (UniqueName: \"kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.814060 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.814116 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.814572 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.814734 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.834850 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l5gp4\" (UniqueName: \"kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4\") pod \"redhat-operators-k57v5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:44 crc kubenswrapper[4879]: I1125 16:49:44.945751 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:49:45 crc kubenswrapper[4879]: I1125 16:49:45.442918 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:49:46 crc kubenswrapper[4879]: I1125 16:49:46.128669 4879 generic.go:334] "Generic (PLEG): container finished" podID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerID="979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a" exitCode=0 Nov 25 16:49:46 crc kubenswrapper[4879]: I1125 16:49:46.128722 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerDied","Data":"979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a"} Nov 25 16:49:46 crc kubenswrapper[4879]: I1125 16:49:46.128932 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerStarted","Data":"1857c614b762eba02bd33f7cec58dbf95924c44815ee90d528cd0fe7b40913bb"} Nov 25 16:49:46 crc kubenswrapper[4879]: I1125 16:49:46.131463 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:49:48 crc kubenswrapper[4879]: I1125 16:49:48.646558 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:49:48 crc kubenswrapper[4879]: E1125 16:49:48.647375 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:49:49 crc kubenswrapper[4879]: I1125 16:49:49.159596 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerStarted","Data":"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee"} Nov 25 16:49:55 crc kubenswrapper[4879]: I1125 16:49:55.221305 4879 generic.go:334] "Generic (PLEG): container finished" podID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerID="da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee" exitCode=0 Nov 25 16:49:55 crc kubenswrapper[4879]: I1125 16:49:55.221386 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerDied","Data":"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee"} Nov 25 16:49:59 crc kubenswrapper[4879]: I1125 16:49:59.653780 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:49:59 crc kubenswrapper[4879]: E1125 16:49:59.654726 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.296729 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerStarted","Data":"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8"} Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.379866 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.382602 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.397554 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.491959 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.492007 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv4kr\" (UniqueName: \"kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.492563 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.594829 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.594877 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv4kr\" (UniqueName: \"kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.595047 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities\") pod \"redhat-marketplace-vxdqh\" (UID: 
\"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.595419 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.595491 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.615601 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv4kr\" (UniqueName: \"kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr\") pod \"redhat-marketplace-vxdqh\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:01 crc kubenswrapper[4879]: I1125 16:50:01.734482 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:02 crc kubenswrapper[4879]: I1125 16:50:02.203971 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:02 crc kubenswrapper[4879]: I1125 16:50:02.309376 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerStarted","Data":"ef26f7b84bbdf1055c885ddbbd37e1bc9ea87e453491feccf1b013f3bbf3c573"} Nov 25 16:50:02 crc kubenswrapper[4879]: I1125 16:50:02.333404 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k57v5" podStartSLOduration=3.82136818 podStartE2EDuration="18.333386289s" podCreationTimestamp="2025-11-25 16:49:44 +0000 UTC" firstStartedPulling="2025-11-25 16:49:46.131162108 +0000 UTC m=+8677.734575189" lastFinishedPulling="2025-11-25 16:50:00.643180237 +0000 UTC m=+8692.246593298" observedRunningTime="2025-11-25 16:50:02.323558087 +0000 UTC m=+8693.926971158" watchObservedRunningTime="2025-11-25 16:50:02.333386289 +0000 UTC m=+8693.936799360" Nov 25 16:50:04 crc kubenswrapper[4879]: I1125 16:50:04.328800 4879 generic.go:334] "Generic (PLEG): container finished" podID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerID="fda267f650614f773768ef3d578fa584b1e80bd05277b8778de733de88572bc1" exitCode=0 Nov 25 16:50:04 crc kubenswrapper[4879]: I1125 16:50:04.328860 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerDied","Data":"fda267f650614f773768ef3d578fa584b1e80bd05277b8778de733de88572bc1"} Nov 25 16:50:04 crc kubenswrapper[4879]: I1125 16:50:04.946713 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:04 crc kubenswrapper[4879]: I1125 16:50:04.946759 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:05 crc 
kubenswrapper[4879]: I1125 16:50:05.997302 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-k57v5" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="registry-server" probeResult="failure" output=< Nov 25 16:50:05 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 16:50:05 crc kubenswrapper[4879]: > Nov 25 16:50:08 crc kubenswrapper[4879]: I1125 16:50:08.382268 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerStarted","Data":"05fdfe24ee067850e5cdbd9913b5de1e5c7bc167e75ff91d3953a883adfc9fac"} Nov 25 16:50:14 crc kubenswrapper[4879]: I1125 16:50:14.645914 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:50:14 crc kubenswrapper[4879]: E1125 16:50:14.647037 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:50:14 crc kubenswrapper[4879]: I1125 16:50:14.998353 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:15 crc kubenswrapper[4879]: I1125 16:50:15.073958 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:16 crc kubenswrapper[4879]: I1125 16:50:16.507812 4879 generic.go:334] "Generic (PLEG): container finished" podID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerID="05fdfe24ee067850e5cdbd9913b5de1e5c7bc167e75ff91d3953a883adfc9fac" exitCode=0 Nov 25 16:50:16 crc kubenswrapper[4879]: I1125 16:50:16.507871 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerDied","Data":"05fdfe24ee067850e5cdbd9913b5de1e5c7bc167e75ff91d3953a883adfc9fac"} Nov 25 16:50:18 crc kubenswrapper[4879]: I1125 16:50:18.436490 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:50:18 crc kubenswrapper[4879]: I1125 16:50:18.437419 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k57v5" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="registry-server" containerID="cri-o://0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8" gracePeriod=2 Nov 25 16:50:18 crc kubenswrapper[4879]: I1125 16:50:18.532980 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerStarted","Data":"e4af1be1f3768c30a5682f2ecbf327b98258db6facb5d9a27ef610f9393e09d6"} Nov 25 16:50:18 crc kubenswrapper[4879]: I1125 16:50:18.535176 4879 generic.go:334] "Generic (PLEG): container finished" podID="aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" containerID="8816ed58c004aea166eeeee08be0d58156253ee8500c1ac8ac64f41b5aa3871b" exitCode=0 Nov 25 16:50:18 crc kubenswrapper[4879]: I1125 16:50:18.535248 4879 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" event={"ID":"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0","Type":"ContainerDied","Data":"8816ed58c004aea166eeeee08be0d58156253ee8500c1ac8ac64f41b5aa3871b"} Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.348233 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.403686 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities\") pod \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.404082 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content\") pod \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.404293 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5gp4\" (UniqueName: \"kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4\") pod \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\" (UID: \"21f9ea93-9e1e-443e-9f22-7c9a915369e5\") " Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.404583 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities" (OuterVolumeSpecName: "utilities") pod "21f9ea93-9e1e-443e-9f22-7c9a915369e5" (UID: "21f9ea93-9e1e-443e-9f22-7c9a915369e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.405274 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.411938 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4" (OuterVolumeSpecName: "kube-api-access-l5gp4") pod "21f9ea93-9e1e-443e-9f22-7c9a915369e5" (UID: "21f9ea93-9e1e-443e-9f22-7c9a915369e5"). InnerVolumeSpecName "kube-api-access-l5gp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.499249 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21f9ea93-9e1e-443e-9f22-7c9a915369e5" (UID: "21f9ea93-9e1e-443e-9f22-7c9a915369e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.507475 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21f9ea93-9e1e-443e-9f22-7c9a915369e5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.507745 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5gp4\" (UniqueName: \"kubernetes.io/projected/21f9ea93-9e1e-443e-9f22-7c9a915369e5-kube-api-access-l5gp4\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.547192 4879 generic.go:334] "Generic (PLEG): container finished" podID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerID="0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8" exitCode=0 Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.547401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerDied","Data":"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8"} Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.547484 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k57v5" event={"ID":"21f9ea93-9e1e-443e-9f22-7c9a915369e5","Type":"ContainerDied","Data":"1857c614b762eba02bd33f7cec58dbf95924c44815ee90d528cd0fe7b40913bb"} Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.547509 4879 scope.go:117] "RemoveContainer" containerID="0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.547554 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k57v5" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.572870 4879 scope.go:117] "RemoveContainer" containerID="da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.578141 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vxdqh" podStartSLOduration=4.702874091 podStartE2EDuration="18.57809969s" podCreationTimestamp="2025-11-25 16:50:01 +0000 UTC" firstStartedPulling="2025-11-25 16:50:04.331733658 +0000 UTC m=+8695.935146739" lastFinishedPulling="2025-11-25 16:50:18.206959267 +0000 UTC m=+8709.810372338" observedRunningTime="2025-11-25 16:50:19.569189342 +0000 UTC m=+8711.172602433" watchObservedRunningTime="2025-11-25 16:50:19.57809969 +0000 UTC m=+8711.181512761" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.602367 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.604485 4879 scope.go:117] "RemoveContainer" containerID="979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.615779 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k57v5"] Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.651648 4879 scope.go:117] "RemoveContainer" containerID="0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8" Nov 25 16:50:19 crc kubenswrapper[4879]: E1125 16:50:19.652095 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8\": container with ID starting with 0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8 not found: ID does not exist" containerID="0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.652143 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8"} err="failed to get container status \"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8\": rpc error: code = NotFound desc = could not find container \"0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8\": container with ID starting with 0c36882a7ba65f45a62874d6a9f501858f0f7c2ec498c0e4e2a22e5e4d6da0a8 not found: ID does not exist" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.652165 4879 scope.go:117] "RemoveContainer" containerID="da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee" Nov 25 16:50:19 crc kubenswrapper[4879]: E1125 16:50:19.652454 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee\": container with ID starting with da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee not found: ID does not exist" containerID="da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.652476 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee"} err="failed to get container status 
\"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee\": rpc error: code = NotFound desc = could not find container \"da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee\": container with ID starting with da07e8997f9af0e741301249f5c9aeb9ab378d7dd634539e0e509bac9a896aee not found: ID does not exist" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.652493 4879 scope.go:117] "RemoveContainer" containerID="979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a" Nov 25 16:50:19 crc kubenswrapper[4879]: E1125 16:50:19.652647 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a\": container with ID starting with 979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a not found: ID does not exist" containerID="979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.652668 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a"} err="failed to get container status \"979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a\": rpc error: code = NotFound desc = could not find container \"979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a\": container with ID starting with 979144fe3236578b574f45a0a3456a88e1f8030060a8e8f7607e7508c527331a not found: ID does not exist" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.674735 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" path="/var/lib/kubelet/pods/21f9ea93-9e1e-443e-9f22-7c9a915369e5/volumes" Nov 25 16:50:19 crc kubenswrapper[4879]: I1125 16:50:19.985404 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.017656 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.017811 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.017941 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.017971 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.018001 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.018043 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx2x4\" (UniqueName: \"kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.018063 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.018158 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0\") pod \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\" (UID: \"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0\") " Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.022142 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.022606 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4" (OuterVolumeSpecName: "kube-api-access-kx2x4") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "kube-api-access-kx2x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.023832 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph" (OuterVolumeSpecName: "ceph") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.055593 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.057851 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.059341 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory" (OuterVolumeSpecName: "inventory") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.059647 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.064814 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" (UID: "aa6e8e77-8d1f-4ad3-9c02-52aa840472b0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.125980 4879 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126344 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126434 4879 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126507 4879 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126577 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126696 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx2x4\" (UniqueName: \"kubernetes.io/projected/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-kube-api-access-kx2x4\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126774 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.126854 4879 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aa6e8e77-8d1f-4ad3-9c02-52aa840472b0-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.573835 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" event={"ID":"aa6e8e77-8d1f-4ad3-9c02-52aa840472b0","Type":"ContainerDied","Data":"b8713163903076b9d1eead70f705c98ea0413771dfcaa31dc7aa45829fda36c3"} Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.574178 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8713163903076b9d1eead70f705c98ea0413771dfcaa31dc7aa45829fda36c3" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.573904 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-56m2z" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.672294 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-gn4dc"] Nov 25 16:50:20 crc kubenswrapper[4879]: E1125 16:50:20.672704 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="registry-server" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.672726 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="registry-server" Nov 25 16:50:20 crc kubenswrapper[4879]: E1125 16:50:20.672753 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" containerName="telemetry-openstack-openstack-cell1" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.672760 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" containerName="telemetry-openstack-openstack-cell1" Nov 25 16:50:20 crc kubenswrapper[4879]: E1125 16:50:20.672779 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="extract-content" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.672786 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="extract-content" Nov 25 16:50:20 crc kubenswrapper[4879]: E1125 16:50:20.672800 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="extract-utilities" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.672805 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="extract-utilities" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.673035 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="21f9ea93-9e1e-443e-9f22-7c9a915369e5" containerName="registry-server" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.673061 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa6e8e77-8d1f-4ad3-9c02-52aa840472b0" containerName="telemetry-openstack-openstack-cell1" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.673832 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.676775 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.679299 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.679340 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.680642 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.682976 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.692954 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-gn4dc"] Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741495 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741603 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741642 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741673 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741709 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5qmp\" (UniqueName: \"kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.741800 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.843949 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.844034 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.844060 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.844091 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.844148 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5qmp\" (UniqueName: \"kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.844214 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.849774 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.849780 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0\") pod 
\"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.850269 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.850681 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.852410 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.864834 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5qmp\" (UniqueName: \"kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp\") pod \"neutron-sriov-openstack-openstack-cell1-gn4dc\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:20 crc kubenswrapper[4879]: I1125 16:50:20.990914 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:50:21 crc kubenswrapper[4879]: I1125 16:50:21.517418 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-gn4dc"] Nov 25 16:50:21 crc kubenswrapper[4879]: W1125 16:50:21.519886 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfaa2bad0_2473_4bea_b07b_1b95f0d02413.slice/crio-53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838 WatchSource:0}: Error finding container 53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838: Status 404 returned error can't find the container with id 53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838 Nov 25 16:50:21 crc kubenswrapper[4879]: I1125 16:50:21.587634 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" event={"ID":"faa2bad0-2473-4bea-b07b-1b95f0d02413","Type":"ContainerStarted","Data":"53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838"} Nov 25 16:50:21 crc kubenswrapper[4879]: I1125 16:50:21.734908 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:21 crc kubenswrapper[4879]: I1125 16:50:21.734971 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:21 crc kubenswrapper[4879]: I1125 16:50:21.788032 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:23 crc kubenswrapper[4879]: I1125 16:50:23.613192 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" event={"ID":"faa2bad0-2473-4bea-b07b-1b95f0d02413","Type":"ContainerStarted","Data":"366eb86ef586380cda5d9081398a4fea7d0139e56d0521964a215bef58dc3655"} Nov 25 16:50:23 crc kubenswrapper[4879]: I1125 16:50:23.632544 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" podStartSLOduration=2.1451188 podStartE2EDuration="3.632506321s" podCreationTimestamp="2025-11-25 16:50:20 +0000 UTC" firstStartedPulling="2025-11-25 16:50:21.523596296 +0000 UTC m=+8713.127009367" lastFinishedPulling="2025-11-25 16:50:23.010983777 +0000 UTC m=+8714.614396888" observedRunningTime="2025-11-25 16:50:23.631234797 +0000 UTC m=+8715.234647878" watchObservedRunningTime="2025-11-25 16:50:23.632506321 +0000 UTC m=+8715.235919402" Nov 25 16:50:27 crc kubenswrapper[4879]: I1125 16:50:27.645364 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:50:27 crc kubenswrapper[4879]: E1125 16:50:27.646291 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:50:31 crc kubenswrapper[4879]: I1125 16:50:31.781711 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 
16:50:31 crc kubenswrapper[4879]: I1125 16:50:31.839298 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:32 crc kubenswrapper[4879]: I1125 16:50:32.713242 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vxdqh" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="registry-server" containerID="cri-o://e4af1be1f3768c30a5682f2ecbf327b98258db6facb5d9a27ef610f9393e09d6" gracePeriod=2 Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.726473 4879 generic.go:334] "Generic (PLEG): container finished" podID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerID="e4af1be1f3768c30a5682f2ecbf327b98258db6facb5d9a27ef610f9393e09d6" exitCode=0 Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.726559 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerDied","Data":"e4af1be1f3768c30a5682f2ecbf327b98258db6facb5d9a27ef610f9393e09d6"} Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.727232 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vxdqh" event={"ID":"c2edf17a-980d-485d-8a6b-bbf01d842c22","Type":"ContainerDied","Data":"ef26f7b84bbdf1055c885ddbbd37e1bc9ea87e453491feccf1b013f3bbf3c573"} Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.727255 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef26f7b84bbdf1055c885ddbbd37e1bc9ea87e453491feccf1b013f3bbf3c573" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.761611 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.846739 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv4kr\" (UniqueName: \"kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr\") pod \"c2edf17a-980d-485d-8a6b-bbf01d842c22\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.847000 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities\") pod \"c2edf17a-980d-485d-8a6b-bbf01d842c22\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.847100 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content\") pod \"c2edf17a-980d-485d-8a6b-bbf01d842c22\" (UID: \"c2edf17a-980d-485d-8a6b-bbf01d842c22\") " Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.848966 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities" (OuterVolumeSpecName: "utilities") pod "c2edf17a-980d-485d-8a6b-bbf01d842c22" (UID: "c2edf17a-980d-485d-8a6b-bbf01d842c22"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.854217 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr" (OuterVolumeSpecName: "kube-api-access-rv4kr") pod "c2edf17a-980d-485d-8a6b-bbf01d842c22" (UID: "c2edf17a-980d-485d-8a6b-bbf01d842c22"). InnerVolumeSpecName "kube-api-access-rv4kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.867022 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2edf17a-980d-485d-8a6b-bbf01d842c22" (UID: "c2edf17a-980d-485d-8a6b-bbf01d842c22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.949881 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.949923 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2edf17a-980d-485d-8a6b-bbf01d842c22-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:33 crc kubenswrapper[4879]: I1125 16:50:33.949935 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv4kr\" (UniqueName: \"kubernetes.io/projected/c2edf17a-980d-485d-8a6b-bbf01d842c22-kube-api-access-rv4kr\") on node \"crc\" DevicePath \"\"" Nov 25 16:50:34 crc kubenswrapper[4879]: I1125 16:50:34.736548 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vxdqh" Nov 25 16:50:34 crc kubenswrapper[4879]: I1125 16:50:34.768587 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:34 crc kubenswrapper[4879]: I1125 16:50:34.779855 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vxdqh"] Nov 25 16:50:35 crc kubenswrapper[4879]: I1125 16:50:35.656479 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" path="/var/lib/kubelet/pods/c2edf17a-980d-485d-8a6b-bbf01d842c22/volumes" Nov 25 16:50:38 crc kubenswrapper[4879]: I1125 16:50:38.645222 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:50:38 crc kubenswrapper[4879]: E1125 16:50:38.645976 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:50:53 crc kubenswrapper[4879]: I1125 16:50:53.644712 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:50:53 crc kubenswrapper[4879]: E1125 16:50:53.645420 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:51:08 crc kubenswrapper[4879]: I1125 16:51:08.644715 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:51:08 crc kubenswrapper[4879]: E1125 16:51:08.645533 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:51:20 crc kubenswrapper[4879]: I1125 16:51:20.645265 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:51:20 crc kubenswrapper[4879]: E1125 16:51:20.646090 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:51:32 crc kubenswrapper[4879]: I1125 16:51:32.645517 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:51:32 
crc kubenswrapper[4879]: E1125 16:51:32.646300 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:51:44 crc kubenswrapper[4879]: I1125 16:51:44.647015 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:51:44 crc kubenswrapper[4879]: E1125 16:51:44.647816 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:51:59 crc kubenswrapper[4879]: I1125 16:51:59.665604 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:51:59 crc kubenswrapper[4879]: E1125 16:51:59.666782 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:52:12 crc kubenswrapper[4879]: I1125 16:52:12.645854 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:52:12 crc kubenswrapper[4879]: E1125 16:52:12.646803 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:52:22 crc kubenswrapper[4879]: I1125 16:52:22.341041 4879 generic.go:334] "Generic (PLEG): container finished" podID="faa2bad0-2473-4bea-b07b-1b95f0d02413" containerID="366eb86ef586380cda5d9081398a4fea7d0139e56d0521964a215bef58dc3655" exitCode=0 Nov 25 16:52:22 crc kubenswrapper[4879]: I1125 16:52:22.341156 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" event={"ID":"faa2bad0-2473-4bea-b07b-1b95f0d02413","Type":"ContainerDied","Data":"366eb86ef586380cda5d9081398a4fea7d0139e56d0521964a215bef58dc3655"} Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.819434 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.984201 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.984805 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.984837 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5qmp\" (UniqueName: \"kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.984925 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.984974 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.985142 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0\") pod \"faa2bad0-2473-4bea-b07b-1b95f0d02413\" (UID: \"faa2bad0-2473-4bea-b07b-1b95f0d02413\") " Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.991837 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:52:23 crc kubenswrapper[4879]: I1125 16:52:23.992332 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp" (OuterVolumeSpecName: "kube-api-access-m5qmp") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "kube-api-access-m5qmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:23.994306 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph" (OuterVolumeSpecName: "ceph") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.025961 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.040899 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.047387 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory" (OuterVolumeSpecName: "inventory") pod "faa2bad0-2473-4bea-b07b-1b95f0d02413" (UID: "faa2bad0-2473-4bea-b07b-1b95f0d02413"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088467 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088518 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088531 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088550 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088563 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5qmp\" (UniqueName: \"kubernetes.io/projected/faa2bad0-2473-4bea-b07b-1b95f0d02413-kube-api-access-m5qmp\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.088577 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/faa2bad0-2473-4bea-b07b-1b95f0d02413-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.363578 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" event={"ID":"faa2bad0-2473-4bea-b07b-1b95f0d02413","Type":"ContainerDied","Data":"53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838"} Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.363954 4879 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="53e41edc8b2f9e77075f8b33f4dfc3ca7cb39c48d731da5867f3b6adf2025838" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.363614 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-gn4dc" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.454939 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5"] Nov 25 16:52:24 crc kubenswrapper[4879]: E1125 16:52:24.455468 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="registry-server" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455490 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="registry-server" Nov 25 16:52:24 crc kubenswrapper[4879]: E1125 16:52:24.455515 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="extract-utilities" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455523 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="extract-utilities" Nov 25 16:52:24 crc kubenswrapper[4879]: E1125 16:52:24.455544 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faa2bad0-2473-4bea-b07b-1b95f0d02413" containerName="neutron-sriov-openstack-openstack-cell1" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455551 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="faa2bad0-2473-4bea-b07b-1b95f0d02413" containerName="neutron-sriov-openstack-openstack-cell1" Nov 25 16:52:24 crc kubenswrapper[4879]: E1125 16:52:24.455572 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="extract-content" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455579 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="extract-content" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455889 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="faa2bad0-2473-4bea-b07b-1b95f0d02413" containerName="neutron-sriov-openstack-openstack-cell1" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.455930 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2edf17a-980d-485d-8a6b-bbf01d842c22" containerName="registry-server" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.456871 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.459631 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.459822 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.460023 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.460201 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.460368 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.468143 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5"] Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.598905 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7t4s\" (UniqueName: \"kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.599219 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.599356 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.599459 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.599661 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.599853 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701621 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701714 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701755 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701805 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701894 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.701943 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7t4s\" (UniqueName: \"kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.706505 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.706916 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " 
pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.707356 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.707556 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.707743 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.717700 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7t4s\" (UniqueName: \"kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s\") pod \"neutron-dhcp-openstack-openstack-cell1-rhwf5\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:24 crc kubenswrapper[4879]: I1125 16:52:24.777088 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:52:25 crc kubenswrapper[4879]: I1125 16:52:25.295324 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5"] Nov 25 16:52:25 crc kubenswrapper[4879]: I1125 16:52:25.376979 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" event={"ID":"774dd8c2-a2e8-4639-82ab-122fa41f5dbb","Type":"ContainerStarted","Data":"21b4c765433c61a7f68bed9add678b67180875fadc9cdca8a34fd8b6698aa562"} Nov 25 16:52:25 crc kubenswrapper[4879]: I1125 16:52:25.645247 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:52:26 crc kubenswrapper[4879]: I1125 16:52:26.390803 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945"} Nov 25 16:52:26 crc kubenswrapper[4879]: I1125 16:52:26.393241 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" event={"ID":"774dd8c2-a2e8-4639-82ab-122fa41f5dbb","Type":"ContainerStarted","Data":"07ad49f552bda348c5611970ae9b2d168464893d7bc473e83564f78f7697492d"} Nov 25 16:52:26 crc kubenswrapper[4879]: I1125 16:52:26.469915 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" podStartSLOduration=2.07082765 podStartE2EDuration="2.469891522s" podCreationTimestamp="2025-11-25 16:52:24 +0000 UTC" firstStartedPulling="2025-11-25 16:52:25.293602443 +0000 UTC m=+8836.897015514" lastFinishedPulling="2025-11-25 16:52:25.692666325 +0000 UTC m=+8837.296079386" observedRunningTime="2025-11-25 16:52:26.436273002 +0000 UTC m=+8838.039686093" watchObservedRunningTime="2025-11-25 16:52:26.469891522 +0000 UTC m=+8838.073304593" Nov 25 16:54:47 crc kubenswrapper[4879]: I1125 16:54:47.409482 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:54:47 crc kubenswrapper[4879]: I1125 16:54:47.410321 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:55:17 crc kubenswrapper[4879]: I1125 16:55:17.408598 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:55:17 crc kubenswrapper[4879]: I1125 16:55:17.409402 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.409259 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.410060 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.410132 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.411049 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.411131 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945" gracePeriod=600 Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.581764 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945" exitCode=0 Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.581833 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945"} Nov 25 16:55:47 crc kubenswrapper[4879]: I1125 16:55:47.581880 4879 scope.go:117] "RemoveContainer" containerID="9c4196b0b3a294eda868e27353d5f665bc33d4be55f6c9e4e1cb261890f6a8d0" Nov 25 16:55:49 crc kubenswrapper[4879]: I1125 16:55:49.610969 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152"} Nov 25 16:56:06 crc kubenswrapper[4879]: I1125 16:56:06.810234 4879 generic.go:334] "Generic (PLEG): container finished" podID="774dd8c2-a2e8-4639-82ab-122fa41f5dbb" containerID="07ad49f552bda348c5611970ae9b2d168464893d7bc473e83564f78f7697492d" exitCode=0 Nov 25 16:56:06 crc kubenswrapper[4879]: I1125 16:56:06.810425 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" event={"ID":"774dd8c2-a2e8-4639-82ab-122fa41f5dbb","Type":"ContainerDied","Data":"07ad49f552bda348c5611970ae9b2d168464893d7bc473e83564f78f7697492d"} Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 
16:56:08.241840 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411448 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411577 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411654 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7t4s\" (UniqueName: \"kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411863 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411926 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.411988 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle\") pod \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\" (UID: \"774dd8c2-a2e8-4639-82ab-122fa41f5dbb\") " Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.417884 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s" (OuterVolumeSpecName: "kube-api-access-l7t4s") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "kube-api-access-l7t4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.417922 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph" (OuterVolumeSpecName: "ceph") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.418216 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.448518 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.448992 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory" (OuterVolumeSpecName: "inventory") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.449238 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "774dd8c2-a2e8-4639-82ab-122fa41f5dbb" (UID: "774dd8c2-a2e8-4639-82ab-122fa41f5dbb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514182 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514222 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514234 4879 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514245 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514256 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.514265 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7t4s\" (UniqueName: \"kubernetes.io/projected/774dd8c2-a2e8-4639-82ab-122fa41f5dbb-kube-api-access-l7t4s\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.830844 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" event={"ID":"774dd8c2-a2e8-4639-82ab-122fa41f5dbb","Type":"ContainerDied","Data":"21b4c765433c61a7f68bed9add678b67180875fadc9cdca8a34fd8b6698aa562"} Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.830904 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21b4c765433c61a7f68bed9add678b67180875fadc9cdca8a34fd8b6698aa562" Nov 25 16:56:08 crc kubenswrapper[4879]: I1125 16:56:08.830874 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-rhwf5" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.139662 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:20 crc kubenswrapper[4879]: E1125 16:56:20.142186 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="774dd8c2-a2e8-4639-82ab-122fa41f5dbb" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.142274 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="774dd8c2-a2e8-4639-82ab-122fa41f5dbb" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.142550 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="774dd8c2-a2e8-4639-82ab-122fa41f5dbb" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.146944 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.170826 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.171361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.171506 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rkhd\" (UniqueName: \"kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.172518 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.273300 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.273487 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rkhd\" (UniqueName: \"kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.273644 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.273751 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.274100 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.303457 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7rkhd\" (UniqueName: \"kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd\") pod \"certified-operators-fwd6z\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:20 crc kubenswrapper[4879]: I1125 16:56:20.489052 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:21 crc kubenswrapper[4879]: I1125 16:56:21.056574 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:21 crc kubenswrapper[4879]: I1125 16:56:21.981654 4879 generic.go:334] "Generic (PLEG): container finished" podID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerID="ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0" exitCode=0 Nov 25 16:56:21 crc kubenswrapper[4879]: I1125 16:56:21.981741 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerDied","Data":"ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0"} Nov 25 16:56:21 crc kubenswrapper[4879]: I1125 16:56:21.982295 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerStarted","Data":"05267382b6fe5feda0f838127aa72f022533fd0a9c3f99a23f7039f27c5f2564"} Nov 25 16:56:21 crc kubenswrapper[4879]: I1125 16:56:21.984562 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 16:56:24 crc kubenswrapper[4879]: I1125 16:56:24.004410 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerStarted","Data":"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f"} Nov 25 16:56:26 crc kubenswrapper[4879]: I1125 16:56:26.027861 4879 generic.go:334] "Generic (PLEG): container finished" podID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerID="5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f" exitCode=0 Nov 25 16:56:26 crc kubenswrapper[4879]: I1125 16:56:26.027969 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerDied","Data":"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f"} Nov 25 16:56:27 crc kubenswrapper[4879]: I1125 16:56:27.049111 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerStarted","Data":"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8"} Nov 25 16:56:27 crc kubenswrapper[4879]: I1125 16:56:27.083447 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fwd6z" podStartSLOduration=2.636190695 podStartE2EDuration="7.083429184s" podCreationTimestamp="2025-11-25 16:56:20 +0000 UTC" firstStartedPulling="2025-11-25 16:56:21.984264904 +0000 UTC m=+9073.587677975" lastFinishedPulling="2025-11-25 16:56:26.431503383 +0000 UTC m=+9078.034916464" observedRunningTime="2025-11-25 16:56:27.069620615 +0000 UTC m=+9078.673033696" watchObservedRunningTime="2025-11-25 
16:56:27.083429184 +0000 UTC m=+9078.686842245" Nov 25 16:56:30 crc kubenswrapper[4879]: I1125 16:56:30.490353 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:30 crc kubenswrapper[4879]: I1125 16:56:30.490897 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:30 crc kubenswrapper[4879]: I1125 16:56:30.535527 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:31 crc kubenswrapper[4879]: I1125 16:56:31.136994 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:31 crc kubenswrapper[4879]: I1125 16:56:31.921179 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:33 crc kubenswrapper[4879]: I1125 16:56:33.112877 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fwd6z" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="registry-server" containerID="cri-o://57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8" gracePeriod=2 Nov 25 16:56:33 crc kubenswrapper[4879]: I1125 16:56:33.974031 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.074663 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities\") pod \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.074753 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rkhd\" (UniqueName: \"kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd\") pod \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.075040 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content\") pod \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\" (UID: \"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0\") " Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.075818 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities" (OuterVolumeSpecName: "utilities") pod "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" (UID: "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.081450 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd" (OuterVolumeSpecName: "kube-api-access-7rkhd") pod "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" (UID: "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0"). InnerVolumeSpecName "kube-api-access-7rkhd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.130851 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" (UID: "f238adf5-ec4a-4a7b-9fa9-52826df6e4e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.139824 4879 generic.go:334] "Generic (PLEG): container finished" podID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerID="57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8" exitCode=0 Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.139867 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerDied","Data":"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8"} Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.139893 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fwd6z" event={"ID":"f238adf5-ec4a-4a7b-9fa9-52826df6e4e0","Type":"ContainerDied","Data":"05267382b6fe5feda0f838127aa72f022533fd0a9c3f99a23f7039f27c5f2564"} Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.139910 4879 scope.go:117] "RemoveContainer" containerID="57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.140032 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fwd6z" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.177271 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.177701 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rkhd\" (UniqueName: \"kubernetes.io/projected/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-kube-api-access-7rkhd\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.177714 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.177589 4879 scope.go:117] "RemoveContainer" containerID="5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.181452 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.194226 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fwd6z"] Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.204291 4879 scope.go:117] "RemoveContainer" containerID="ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.255084 4879 scope.go:117] "RemoveContainer" containerID="57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8" Nov 25 16:56:34 crc kubenswrapper[4879]: E1125 16:56:34.255448 4879 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8\": container with ID starting with 57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8 not found: ID does not exist" containerID="57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.255522 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8"} err="failed to get container status \"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8\": rpc error: code = NotFound desc = could not find container \"57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8\": container with ID starting with 57200d68ea8cb098b9c737da2937db36d64e1fcae3f4f245f7a8b4a0e69571f8 not found: ID does not exist" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.255556 4879 scope.go:117] "RemoveContainer" containerID="5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f" Nov 25 16:56:34 crc kubenswrapper[4879]: E1125 16:56:34.255870 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f\": container with ID starting with 5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f not found: ID does not exist" containerID="5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.255909 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f"} err="failed to get container status \"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f\": rpc error: code = NotFound desc = could not find container \"5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f\": container with ID starting with 5d7e890aee9c24d6978d0913cd1cfef249f52249c9a22735589043c801732f8f not found: ID does not exist" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.255939 4879 scope.go:117] "RemoveContainer" containerID="ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0" Nov 25 16:56:34 crc kubenswrapper[4879]: E1125 16:56:34.256264 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0\": container with ID starting with ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0 not found: ID does not exist" containerID="ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0" Nov 25 16:56:34 crc kubenswrapper[4879]: I1125 16:56:34.256302 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0"} err="failed to get container status \"ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0\": rpc error: code = NotFound desc = could not find container \"ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0\": container with ID starting with ac425cdfd4dfdbdf6d3f41ea9a486c23ee966f320b81dcbcd8ce9fba0aa000d0 not found: ID does not exist" Nov 25 16:56:35 crc kubenswrapper[4879]: I1125 16:56:35.656910 4879 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" path="/var/lib/kubelet/pods/f238adf5-ec4a-4a7b-9fa9-52826df6e4e0/volumes" Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.123544 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.124389 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="b719ae29-7488-49f4-8899-859e6271d428" containerName="nova-cell0-conductor-conductor" containerID="cri-o://9e6034b84e23297b847e6429e0891118fefda2c5ab4f1ad763fc43e577af1bea" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.602712 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.603244 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerName="nova-cell1-conductor-conductor" containerID="cri-o://5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.802250 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.802532 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="92111edc-c57b-453c-bfbb-508c29a7c0f0" containerName="nova-scheduler-scheduler" containerID="cri-o://9a41940d1d7096a7ac78292ded9aeabee98b8be50ed168573db512a271259bd8" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.828229 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.828481 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-log" containerID="cri-o://6aad49e0f6a4f078a1a8a1e1da15c03eba66261a5fbdcc7d890da1e9f55aa135" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.828570 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-api" containerID="cri-o://120d120230066419362a4ff48d3790b4753203ed6d6637d59dc1efcc1d1a37c3" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.840920 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.841141 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" containerID="cri-o://2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e" gracePeriod=30 Nov 25 16:56:41 crc kubenswrapper[4879]: I1125 16:56:41.841863 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" containerID="cri-o://a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe" gracePeriod=30 Nov 25 16:56:42 crc kubenswrapper[4879]: I1125 16:56:42.238002 4879 generic.go:334] "Generic (PLEG): container finished" 
podID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerID="6aad49e0f6a4f078a1a8a1e1da15c03eba66261a5fbdcc7d890da1e9f55aa135" exitCode=143 Nov 25 16:56:42 crc kubenswrapper[4879]: I1125 16:56:42.238204 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerDied","Data":"6aad49e0f6a4f078a1a8a1e1da15c03eba66261a5fbdcc7d890da1e9f55aa135"} Nov 25 16:56:42 crc kubenswrapper[4879]: I1125 16:56:42.241617 4879 generic.go:334] "Generic (PLEG): container finished" podID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerID="2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e" exitCode=143 Nov 25 16:56:42 crc kubenswrapper[4879]: I1125 16:56:42.241688 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerDied","Data":"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e"} Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.261328 4879 generic.go:334] "Generic (PLEG): container finished" podID="b719ae29-7488-49f4-8899-859e6271d428" containerID="9e6034b84e23297b847e6429e0891118fefda2c5ab4f1ad763fc43e577af1bea" exitCode=0 Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.261419 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b719ae29-7488-49f4-8899-859e6271d428","Type":"ContainerDied","Data":"9e6034b84e23297b847e6429e0891118fefda2c5ab4f1ad763fc43e577af1bea"} Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.263320 4879 generic.go:334] "Generic (PLEG): container finished" podID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerID="5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" exitCode=0 Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.263396 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5db21db9-0d2e-42eb-8f20-ea1af2d0641a","Type":"ContainerDied","Data":"5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2"} Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.263527 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"5db21db9-0d2e-42eb-8f20-ea1af2d0641a","Type":"ContainerDied","Data":"bb910c3152a1978ccc2295b13b68b3757f2cc1802d50126cece4b8668e189c82"} Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.263621 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb910c3152a1978ccc2295b13b68b3757f2cc1802d50126cece4b8668e189c82" Nov 25 16:56:43 crc kubenswrapper[4879]: E1125 16:56:43.543445 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2 is running failed: container process not found" containerID="5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:56:43 crc kubenswrapper[4879]: E1125 16:56:43.544279 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2 is running failed: container process not found" containerID="5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:56:43 crc kubenswrapper[4879]: E1125 16:56:43.546075 4879 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2 is running failed: container process not found" containerID="5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 25 16:56:43 crc kubenswrapper[4879]: E1125 16:56:43.546117 4879 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerName="nova-cell1-conductor-conductor" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.646573 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.655892 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791005 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle\") pod \"b719ae29-7488-49f4-8899-859e6271d428\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791368 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data\") pod \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791409 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9fbj\" (UniqueName: \"kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj\") pod \"b719ae29-7488-49f4-8899-859e6271d428\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791481 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle\") pod \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791517 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data\") pod \"b719ae29-7488-49f4-8899-859e6271d428\" (UID: \"b719ae29-7488-49f4-8899-859e6271d428\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.791547 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqbw5\" (UniqueName: \"kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5\") pod \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\" (UID: \"5db21db9-0d2e-42eb-8f20-ea1af2d0641a\") " Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.797648 4879 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5" (OuterVolumeSpecName: "kube-api-access-sqbw5") pod "5db21db9-0d2e-42eb-8f20-ea1af2d0641a" (UID: "5db21db9-0d2e-42eb-8f20-ea1af2d0641a"). InnerVolumeSpecName "kube-api-access-sqbw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.798571 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj" (OuterVolumeSpecName: "kube-api-access-c9fbj") pod "b719ae29-7488-49f4-8899-859e6271d428" (UID: "b719ae29-7488-49f4-8899-859e6271d428"). InnerVolumeSpecName "kube-api-access-c9fbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.822322 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5db21db9-0d2e-42eb-8f20-ea1af2d0641a" (UID: "5db21db9-0d2e-42eb-8f20-ea1af2d0641a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.828986 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data" (OuterVolumeSpecName: "config-data") pod "b719ae29-7488-49f4-8899-859e6271d428" (UID: "b719ae29-7488-49f4-8899-859e6271d428"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.830905 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data" (OuterVolumeSpecName: "config-data") pod "5db21db9-0d2e-42eb-8f20-ea1af2d0641a" (UID: "5db21db9-0d2e-42eb-8f20-ea1af2d0641a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.831664 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b719ae29-7488-49f4-8899-859e6271d428" (UID: "b719ae29-7488-49f4-8899-859e6271d428"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894390 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894424 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9fbj\" (UniqueName: \"kubernetes.io/projected/b719ae29-7488-49f4-8899-859e6271d428-kube-api-access-c9fbj\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894442 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894453 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894464 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqbw5\" (UniqueName: \"kubernetes.io/projected/5db21db9-0d2e-42eb-8f20-ea1af2d0641a-kube-api-access-sqbw5\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:43 crc kubenswrapper[4879]: I1125 16:56:43.894472 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b719ae29-7488-49f4-8899-859e6271d428-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.286720 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.287266 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.288351 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b719ae29-7488-49f4-8899-859e6271d428","Type":"ContainerDied","Data":"87349c583392da7693072c62c9340787f3e92bf6be1b181a502869b5bd0d5f5e"} Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.288391 4879 scope.go:117] "RemoveContainer" containerID="9e6034b84e23297b847e6429e0891118fefda2c5ab4f1ad763fc43e577af1bea" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.359574 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.381697 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.407187 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.435179 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.464196 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: E1125 16:56:44.464943 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="registry-server" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.464965 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="registry-server" Nov 25 16:56:44 crc kubenswrapper[4879]: E1125 16:56:44.464982 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b719ae29-7488-49f4-8899-859e6271d428" containerName="nova-cell0-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.464988 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b719ae29-7488-49f4-8899-859e6271d428" containerName="nova-cell0-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: E1125 16:56:44.465005 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="extract-utilities" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465011 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="extract-utilities" Nov 25 16:56:44 crc kubenswrapper[4879]: E1125 16:56:44.465030 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerName="nova-cell1-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465036 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerName="nova-cell1-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: E1125 16:56:44.465057 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="extract-content" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465062 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="extract-content" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465273 4879 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f238adf5-ec4a-4a7b-9fa9-52826df6e4e0" containerName="registry-server" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465283 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" containerName="nova-cell1-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.465310 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b719ae29-7488-49f4-8899-859e6271d428" containerName="nova-cell0-conductor-conductor" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.466023 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.469806 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.477854 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.481762 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.486793 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.493148 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.505869 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.612862 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.612900 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.612930 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpb26\" (UniqueName: \"kubernetes.io/projected/c9c32c35-2748-44d3-9810-98ffc9c6011b-kube-api-access-dpb26\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.613084 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sn6z\" (UniqueName: \"kubernetes.io/projected/c68eddd9-47a6-45ca-9873-636e7785a07d-kube-api-access-5sn6z\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.613198 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.613275 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715361 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715550 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715572 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715597 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpb26\" (UniqueName: \"kubernetes.io/projected/c9c32c35-2748-44d3-9810-98ffc9c6011b-kube-api-access-dpb26\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715638 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sn6z\" (UniqueName: \"kubernetes.io/projected/c68eddd9-47a6-45ca-9873-636e7785a07d-kube-api-access-5sn6z\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.715692 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.720022 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.720357 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c68eddd9-47a6-45ca-9873-636e7785a07d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: 
\"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.721401 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.721853 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c32c35-2748-44d3-9810-98ffc9c6011b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.732438 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sn6z\" (UniqueName: \"kubernetes.io/projected/c68eddd9-47a6-45ca-9873-636e7785a07d-kube-api-access-5sn6z\") pod \"nova-cell0-conductor-0\" (UID: \"c68eddd9-47a6-45ca-9873-636e7785a07d\") " pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.733793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpb26\" (UniqueName: \"kubernetes.io/projected/c9c32c35-2748-44d3-9810-98ffc9c6011b-kube-api-access-dpb26\") pod \"nova-cell1-conductor-0\" (UID: \"c9c32c35-2748-44d3-9810-98ffc9c6011b\") " pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.785656 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:44 crc kubenswrapper[4879]: I1125 16:56:44.801086 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.278472 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.298007 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.90:8775/\": read tcp 10.217.0.2:58140->10.217.1.90:8775: read: connection reset by peer" Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.298006 4879 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.90:8775/\": read tcp 10.217.0.2:58138->10.217.1.90:8775: read: connection reset by peer" Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.365716 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 25 16:56:45 crc kubenswrapper[4879]: W1125 16:56:45.569160 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc68eddd9_47a6_45ca_9873_636e7785a07d.slice/crio-357dc7d85f7dc42a9b96f97a96ac27ceeca73a3d822ecb625c842dea428412a1 WatchSource:0}: Error finding container 357dc7d85f7dc42a9b96f97a96ac27ceeca73a3d822ecb625c842dea428412a1: Status 404 returned error can't find the container with id 357dc7d85f7dc42a9b96f97a96ac27ceeca73a3d822ecb625c842dea428412a1 Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.759054 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5db21db9-0d2e-42eb-8f20-ea1af2d0641a" path="/var/lib/kubelet/pods/5db21db9-0d2e-42eb-8f20-ea1af2d0641a/volumes" Nov 25 16:56:45 crc kubenswrapper[4879]: I1125 16:56:45.857274 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b719ae29-7488-49f4-8899-859e6271d428" path="/var/lib/kubelet/pods/b719ae29-7488-49f4-8899-859e6271d428/volumes" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.320561 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.320593 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c9c32c35-2748-44d3-9810-98ffc9c6011b","Type":"ContainerStarted","Data":"af259ae48ca6366381dcddddb1f81ba76b663ef872d9274315523f856ae93c2c"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.328283 4879 generic.go:334] "Generic (PLEG): container finished" podID="92111edc-c57b-453c-bfbb-508c29a7c0f0" containerID="9a41940d1d7096a7ac78292ded9aeabee98b8be50ed168573db512a271259bd8" exitCode=0 Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.328371 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92111edc-c57b-453c-bfbb-508c29a7c0f0","Type":"ContainerDied","Data":"9a41940d1d7096a7ac78292ded9aeabee98b8be50ed168573db512a271259bd8"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.334445 4879 generic.go:334] "Generic (PLEG): container finished" podID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerID="a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe" exitCode=0 Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.334519 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerDied","Data":"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.334545 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"557da3e1-38c8-4663-85cf-14b4d0ca28b2","Type":"ContainerDied","Data":"4b850a28b66c828f7854b00af831e57e1f1b2731558aae7473e2dbc921687683"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.334563 4879 scope.go:117] "RemoveContainer" containerID="a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.334706 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.337293 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c68eddd9-47a6-45ca-9873-636e7785a07d","Type":"ContainerStarted","Data":"357dc7d85f7dc42a9b96f97a96ac27ceeca73a3d822ecb625c842dea428412a1"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.359578 4879 generic.go:334] "Generic (PLEG): container finished" podID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerID="120d120230066419362a4ff48d3790b4753203ed6d6637d59dc1efcc1d1a37c3" exitCode=0 Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.359626 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerDied","Data":"120d120230066419362a4ff48d3790b4753203ed6d6637d59dc1efcc1d1a37c3"} Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.428988 4879 scope.go:117] "RemoveContainer" containerID="2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.453564 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data\") pod \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.453654 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle\") pod \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.453705 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gpmm\" (UniqueName: \"kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm\") pod \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.453880 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs\") pod \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\" (UID: \"557da3e1-38c8-4663-85cf-14b4d0ca28b2\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.461730 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs" (OuterVolumeSpecName: "logs") pod "557da3e1-38c8-4663-85cf-14b4d0ca28b2" (UID: "557da3e1-38c8-4663-85cf-14b4d0ca28b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.462020 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.473968 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm" (OuterVolumeSpecName: "kube-api-access-6gpmm") pod "557da3e1-38c8-4663-85cf-14b4d0ca28b2" (UID: "557da3e1-38c8-4663-85cf-14b4d0ca28b2"). InnerVolumeSpecName "kube-api-access-6gpmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.517355 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "557da3e1-38c8-4663-85cf-14b4d0ca28b2" (UID: "557da3e1-38c8-4663-85cf-14b4d0ca28b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.524230 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data" (OuterVolumeSpecName: "config-data") pod "557da3e1-38c8-4663-85cf-14b4d0ca28b2" (UID: "557da3e1-38c8-4663-85cf-14b4d0ca28b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.536361 4879 scope.go:117] "RemoveContainer" containerID="a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.537000 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe\": container with ID starting with a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe not found: ID does not exist" containerID="a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.537035 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe"} err="failed to get container status \"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe\": rpc error: code = NotFound desc = could not find container \"a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe\": container with ID starting with a441fff7866606ba6f0c9d8e446439535514812e2a439ad2d4987d6c93afd2fe not found: ID does not exist" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.537059 4879 scope.go:117] "RemoveContainer" containerID="2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.537428 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e\": container with ID starting with 2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e not found: ID does not exist" containerID="2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.537472 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e"} err="failed to get container status \"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e\": rpc error: code = NotFound desc = could not find container \"2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e\": container with ID starting with 2f18f7d8757a28f2b7aec6b92472ba4a214eb246e4eab570cf368792c489799e not found: ID does not exist" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.555997 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data\") pod \"92111edc-c57b-453c-bfbb-508c29a7c0f0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.556477 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p629q\" (UniqueName: \"kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q\") pod \"92111edc-c57b-453c-bfbb-508c29a7c0f0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.556504 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle\") pod \"92111edc-c57b-453c-bfbb-508c29a7c0f0\" (UID: \"92111edc-c57b-453c-bfbb-508c29a7c0f0\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.557180 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/557da3e1-38c8-4663-85cf-14b4d0ca28b2-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.557203 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.557212 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/557da3e1-38c8-4663-85cf-14b4d0ca28b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.557221 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gpmm\" (UniqueName: \"kubernetes.io/projected/557da3e1-38c8-4663-85cf-14b4d0ca28b2-kube-api-access-6gpmm\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.559968 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q" (OuterVolumeSpecName: "kube-api-access-p629q") pod "92111edc-c57b-453c-bfbb-508c29a7c0f0" (UID: "92111edc-c57b-453c-bfbb-508c29a7c0f0"). InnerVolumeSpecName "kube-api-access-p629q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.599261 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data" (OuterVolumeSpecName: "config-data") pod "92111edc-c57b-453c-bfbb-508c29a7c0f0" (UID: "92111edc-c57b-453c-bfbb-508c29a7c0f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.600430 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92111edc-c57b-453c-bfbb-508c29a7c0f0" (UID: "92111edc-c57b-453c-bfbb-508c29a7c0f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.603674 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.678745 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p629q\" (UniqueName: \"kubernetes.io/projected/92111edc-c57b-453c-bfbb-508c29a7c0f0-kube-api-access-p629q\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.678778 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.678787 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92111edc-c57b-453c-bfbb-508c29a7c0f0-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.702197 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.718029 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728370 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.728791 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728802 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.728823 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92111edc-c57b-453c-bfbb-508c29a7c0f0" containerName="nova-scheduler-scheduler" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728828 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="92111edc-c57b-453c-bfbb-508c29a7c0f0" containerName="nova-scheduler-scheduler" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.728853 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728860 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.728883 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-api" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728888 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-api" Nov 25 16:56:46 crc kubenswrapper[4879]: E1125 16:56:46.728904 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-log" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.728910 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-log" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.729103 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-log" Nov 25 16:56:46 crc 
kubenswrapper[4879]: I1125 16:56:46.729134 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="92111edc-c57b-453c-bfbb-508c29a7c0f0" containerName="nova-scheduler-scheduler" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.729150 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-log" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.729158 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" containerName="nova-api-api" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.729181 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" containerName="nova-metadata-metadata" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.730305 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.734849 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.739546 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.787732 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data\") pod \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.787817 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ck22\" (UniqueName: \"kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22\") pod \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.787893 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle\") pod \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.793158 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs\") pod \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\" (UID: \"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4\") " Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.793529 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-config-data\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.793635 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.793682 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-logs\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.793897 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgh92\" (UniqueName: \"kubernetes.io/projected/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-kube-api-access-tgh92\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.813878 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22" (OuterVolumeSpecName: "kube-api-access-2ck22") pod "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" (UID: "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4"). InnerVolumeSpecName "kube-api-access-2ck22". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.829385 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" (UID: "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.835286 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data" (OuterVolumeSpecName: "config-data") pod "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" (UID: "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.839827 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs" (OuterVolumeSpecName: "logs") pod "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" (UID: "cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896222 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-config-data\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896285 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896314 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-logs\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896394 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgh92\" (UniqueName: \"kubernetes.io/projected/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-kube-api-access-tgh92\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896480 4879 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-logs\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896490 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896501 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ck22\" (UniqueName: \"kubernetes.io/projected/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-kube-api-access-2ck22\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.896511 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.897619 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-logs\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.901107 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-config-data\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.903080 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:46 crc kubenswrapper[4879]: I1125 16:56:46.912691 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgh92\" (UniqueName: \"kubernetes.io/projected/b517bafe-e7fe-41b3-bf10-a1b2dfee55c2-kube-api-access-tgh92\") pod \"nova-metadata-0\" (UID: \"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2\") " pod="openstack/nova-metadata-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.115595 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.377632 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c68eddd9-47a6-45ca-9873-636e7785a07d","Type":"ContainerStarted","Data":"bac82308beb91ebb1204218080eafe147c42422c821ef77fe0f15816b306d737"} Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.378026 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.379873 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c9c32c35-2748-44d3-9810-98ffc9c6011b","Type":"ContainerStarted","Data":"58f29e5c53717f8557a4c74ee699f784a58f65269d12b2dfd941802a7887da71"} Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.380371 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.383333 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.383349 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4","Type":"ContainerDied","Data":"fa5a8b719111bb32487f8fcb8468a2caba787332b1721cb3fe80ac270dc09dce"} Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.383401 4879 scope.go:117] "RemoveContainer" containerID="120d120230066419362a4ff48d3790b4753203ed6d6637d59dc1efcc1d1a37c3" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.389447 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"92111edc-c57b-453c-bfbb-508c29a7c0f0","Type":"ContainerDied","Data":"166b28ce10ca77985065d8dd6b0c2186643ba3c30f97cd3a02433ce0d9a7f23a"} Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.389455 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.411329 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=3.411310852 podStartE2EDuration="3.411310852s" podCreationTimestamp="2025-11-25 16:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:56:47.396565298 +0000 UTC m=+9098.999978369" watchObservedRunningTime="2025-11-25 16:56:47.411310852 +0000 UTC m=+9099.014723913" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.416741 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.4167247769999998 podStartE2EDuration="3.416724777s" podCreationTimestamp="2025-11-25 16:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:56:47.415517825 +0000 UTC m=+9099.018930916" watchObservedRunningTime="2025-11-25 16:56:47.416724777 +0000 UTC m=+9099.020137858" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.456288 4879 scope.go:117] "RemoveContainer" containerID="6aad49e0f6a4f078a1a8a1e1da15c03eba66261a5fbdcc7d890da1e9f55aa135" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.457272 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.470920 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.516065 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.530429 4879 scope.go:117] "RemoveContainer" containerID="9a41940d1d7096a7ac78292ded9aeabee98b8be50ed168573db512a271259bd8" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.534839 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.546184 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.547675 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.549810 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.570466 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.581951 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.586059 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.589555 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.596542 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611373 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8hkf\" (UniqueName: \"kubernetes.io/projected/e63045ef-684f-4067-bf59-b762d1890157-kube-api-access-n8hkf\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611459 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611487 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzn2c\" (UniqueName: \"kubernetes.io/projected/cb351db7-19bf-49ee-81e8-c660932014f9-kube-api-access-rzn2c\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611625 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611819 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-config-data\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.611909 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-config-data\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.612016 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e63045ef-684f-4067-bf59-b762d1890157-logs\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.622851 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.661220 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="557da3e1-38c8-4663-85cf-14b4d0ca28b2" path="/var/lib/kubelet/pods/557da3e1-38c8-4663-85cf-14b4d0ca28b2/volumes" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.662025 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="92111edc-c57b-453c-bfbb-508c29a7c0f0" path="/var/lib/kubelet/pods/92111edc-c57b-453c-bfbb-508c29a7c0f0/volumes" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.663644 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4" path="/var/lib/kubelet/pods/cad4b3ef-df2b-4ea2-a02e-c161cd66cfa4/volumes" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.713985 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-config-data\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714051 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-config-data\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714195 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e63045ef-684f-4067-bf59-b762d1890157-logs\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714241 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8hkf\" (UniqueName: \"kubernetes.io/projected/e63045ef-684f-4067-bf59-b762d1890157-kube-api-access-n8hkf\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714322 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714360 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzn2c\" (UniqueName: \"kubernetes.io/projected/cb351db7-19bf-49ee-81e8-c660932014f9-kube-api-access-rzn2c\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.714454 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.715331 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e63045ef-684f-4067-bf59-b762d1890157-logs\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.721253 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" 
Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.721561 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e63045ef-684f-4067-bf59-b762d1890157-config-data\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.723526 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-config-data\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.728813 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb351db7-19bf-49ee-81e8-c660932014f9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.737956 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8hkf\" (UniqueName: \"kubernetes.io/projected/e63045ef-684f-4067-bf59-b762d1890157-kube-api-access-n8hkf\") pod \"nova-api-0\" (UID: \"e63045ef-684f-4067-bf59-b762d1890157\") " pod="openstack/nova-api-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.739964 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzn2c\" (UniqueName: \"kubernetes.io/projected/cb351db7-19bf-49ee-81e8-c660932014f9-kube-api-access-rzn2c\") pod \"nova-scheduler-0\" (UID: \"cb351db7-19bf-49ee-81e8-c660932014f9\") " pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.873794 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 25 16:56:47 crc kubenswrapper[4879]: I1125 16:56:47.905721 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 25 16:56:48 crc kubenswrapper[4879]: W1125 16:56:48.374333 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb351db7_19bf_49ee_81e8_c660932014f9.slice/crio-a9ce212b00dffbebfd019dedbe7172b75850324b46a4132c8e8f2f2340840057 WatchSource:0}: Error finding container a9ce212b00dffbebfd019dedbe7172b75850324b46a4132c8e8f2f2340840057: Status 404 returned error can't find the container with id a9ce212b00dffbebfd019dedbe7172b75850324b46a4132c8e8f2f2340840057 Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.374495 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.407727 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb351db7-19bf-49ee-81e8-c660932014f9","Type":"ContainerStarted","Data":"a9ce212b00dffbebfd019dedbe7172b75850324b46a4132c8e8f2f2340840057"} Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.410387 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2","Type":"ContainerStarted","Data":"040e7715f6c499248f36334a65ac3a25e6a7cb8bb4b166f6aabb18b1813c6a16"} Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.410463 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2","Type":"ContainerStarted","Data":"ebc7ebfcd599f619fac4fe860e427764163888aebb335426bbe62fba74a70f3e"} Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.410486 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b517bafe-e7fe-41b3-bf10-a1b2dfee55c2","Type":"ContainerStarted","Data":"f183359898c73e8dbf206a9990bda7297146e20e637b7c161792112568a59f9e"} Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.430078 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.430056074 podStartE2EDuration="2.430056074s" podCreationTimestamp="2025-11-25 16:56:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:56:48.426768165 +0000 UTC m=+9100.030181246" watchObservedRunningTime="2025-11-25 16:56:48.430056074 +0000 UTC m=+9100.033469155" Nov 25 16:56:48 crc kubenswrapper[4879]: I1125 16:56:48.485301 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 25 16:56:48 crc kubenswrapper[4879]: W1125 16:56:48.490943 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode63045ef_684f_4067_bf59_b762d1890157.slice/crio-7fd38a79529833ed98785700098a3d880649422a073b91872c8aec3c093a6be4 WatchSource:0}: Error finding container 7fd38a79529833ed98785700098a3d880649422a073b91872c8aec3c093a6be4: Status 404 returned error can't find the container with id 7fd38a79529833ed98785700098a3d880649422a073b91872c8aec3c093a6be4 Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 16:56:49.431712 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"cb351db7-19bf-49ee-81e8-c660932014f9","Type":"ContainerStarted","Data":"3d55c166f019e36ccb783f8738a06ed4d4de5a4e1016873e8a9e369815868100"} Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 
16:56:49.438509 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e63045ef-684f-4067-bf59-b762d1890157","Type":"ContainerStarted","Data":"3342d4d72c92a783724f51f1c719d68d5d02461b6b775f0397b0c3b1063ee2dd"} Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 16:56:49.438568 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e63045ef-684f-4067-bf59-b762d1890157","Type":"ContainerStarted","Data":"091f5ec137b81c66417e953634fc21671e938ca2d3383f102ccdc708dd5b9404"} Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 16:56:49.438652 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e63045ef-684f-4067-bf59-b762d1890157","Type":"ContainerStarted","Data":"7fd38a79529833ed98785700098a3d880649422a073b91872c8aec3c093a6be4"} Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 16:56:49.456506 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.45647516 podStartE2EDuration="2.45647516s" podCreationTimestamp="2025-11-25 16:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:56:49.451551138 +0000 UTC m=+9101.054964299" watchObservedRunningTime="2025-11-25 16:56:49.45647516 +0000 UTC m=+9101.059888271" Nov 25 16:56:49 crc kubenswrapper[4879]: I1125 16:56:49.486253 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.486234656 podStartE2EDuration="2.486234656s" podCreationTimestamp="2025-11-25 16:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 16:56:49.478227002 +0000 UTC m=+9101.081640083" watchObservedRunningTime="2025-11-25 16:56:49.486234656 +0000 UTC m=+9101.089647727" Nov 25 16:56:52 crc kubenswrapper[4879]: I1125 16:56:52.116626 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:56:52 crc kubenswrapper[4879]: I1125 16:56:52.117268 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 25 16:56:52 crc kubenswrapper[4879]: I1125 16:56:52.874026 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 25 16:56:54 crc kubenswrapper[4879]: I1125 16:56:54.813314 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 25 16:56:54 crc kubenswrapper[4879]: I1125 16:56:54.831794 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.116397 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.117378 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.874395 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.906509 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.906581 4879 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 25 16:56:57 crc kubenswrapper[4879]: I1125 16:56:57.912335 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.135843 4879 scope.go:117] "RemoveContainer" containerID="fda267f650614f773768ef3d578fa584b1e80bd05277b8778de733de88572bc1" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.156500 4879 scope.go:117] "RemoveContainer" containerID="05fdfe24ee067850e5cdbd9913b5de1e5c7bc167e75ff91d3953a883adfc9fac" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.199374 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b517bafe-e7fe-41b3-bf10-a1b2dfee55c2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.196:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.199852 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b517bafe-e7fe-41b3-bf10-a1b2dfee55c2" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.196:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.223244 4879 scope.go:117] "RemoveContainer" containerID="e4af1be1f3768c30a5682f2ecbf327b98258db6facb5d9a27ef610f9393e09d6" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.279651 4879 scope.go:117] "RemoveContainer" containerID="5f30862cf48037facce92754857b8bda7ce15349178b556b33f0a388d9d6d9b2" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.576781 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.989437 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e63045ef-684f-4067-bf59-b762d1890157" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:56:58 crc kubenswrapper[4879]: I1125 16:56:58.989421 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e63045ef-684f-4067-bf59-b762d1890157" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.118206 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.118722 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.120766 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.121673 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.910156 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.910468 4879 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.910712 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.910768 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.915537 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:57:07 crc kubenswrapper[4879]: I1125 16:57:07.916060 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.974225 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29"] Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.975820 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979296 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979542 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-pzknf" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979727 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979747 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979830 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979867 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.979948 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 25 16:57:08 crc kubenswrapper[4879]: I1125 16:57:08.989148 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29"] Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005015 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005061 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005115 4879 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005200 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005224 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005300 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42nlr\" (UniqueName: \"kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005372 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005389 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.005705 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.006101 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.006209 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.107897 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.107956 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.107992 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108014 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108044 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108076 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: 
\"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108093 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108143 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42nlr\" (UniqueName: \"kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108188 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108206 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.108255 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.109440 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.109847 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.113481 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.115785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.115898 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.116044 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.116050 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.116411 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.116500 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.116525 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc 
kubenswrapper[4879]: I1125 16:57:09.123793 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42nlr\" (UniqueName: \"kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.308813 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 16:57:09 crc kubenswrapper[4879]: I1125 16:57:09.843321 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29"] Nov 25 16:57:09 crc kubenswrapper[4879]: W1125 16:57:09.847813 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d1a0044_8107_4346_a960_be9ee93cc90c.slice/crio-e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7 WatchSource:0}: Error finding container e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7: Status 404 returned error can't find the container with id e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7 Nov 25 16:57:10 crc kubenswrapper[4879]: I1125 16:57:10.350472 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 25 16:57:10 crc kubenswrapper[4879]: I1125 16:57:10.664426 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" event={"ID":"6d1a0044-8107-4346-a960-be9ee93cc90c","Type":"ContainerStarted","Data":"0b70643095ad8d82203db9ea3acf7781481c0d1a45ec2f897112424d471b708a"} Nov 25 16:57:10 crc kubenswrapper[4879]: I1125 16:57:10.664669 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" event={"ID":"6d1a0044-8107-4346-a960-be9ee93cc90c","Type":"ContainerStarted","Data":"e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7"} Nov 25 16:57:10 crc kubenswrapper[4879]: I1125 16:57:10.690627 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" podStartSLOduration=2.193625457 podStartE2EDuration="2.690607609s" podCreationTimestamp="2025-11-25 16:57:08 +0000 UTC" firstStartedPulling="2025-11-25 16:57:09.850553343 +0000 UTC m=+9121.453966424" lastFinishedPulling="2025-11-25 16:57:10.347535505 +0000 UTC m=+9121.950948576" observedRunningTime="2025-11-25 16:57:10.681804304 +0000 UTC m=+9122.285217385" watchObservedRunningTime="2025-11-25 16:57:10.690607609 +0000 UTC m=+9122.294020690" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.415200 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.419675 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.431204 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.530661 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6xtd\" (UniqueName: \"kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.530807 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.530909 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.632986 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.633137 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6xtd\" (UniqueName: \"kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.633291 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.633834 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.633896 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.655188 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k6xtd\" (UniqueName: \"kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd\") pod \"community-operators-jxb97\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:02 crc kubenswrapper[4879]: I1125 16:58:02.749250 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:03 crc kubenswrapper[4879]: I1125 16:58:03.768638 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:04 crc kubenswrapper[4879]: I1125 16:58:04.257359 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerID="7789a30040ac177351459035dde8d5985234272559476aa52939747e117e88b9" exitCode=0 Nov 25 16:58:04 crc kubenswrapper[4879]: I1125 16:58:04.257427 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerDied","Data":"7789a30040ac177351459035dde8d5985234272559476aa52939747e117e88b9"} Nov 25 16:58:04 crc kubenswrapper[4879]: I1125 16:58:04.257711 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerStarted","Data":"07a35d2e42ccf7806eb7923712e713a86e3172e81cccac32d934d625ccfd5987"} Nov 25 16:58:06 crc kubenswrapper[4879]: I1125 16:58:06.278594 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerStarted","Data":"59dfd7afaeb1d745aa47dfeb9c34afddae5f04470d646564fa5a637ae4a633f5"} Nov 25 16:58:07 crc kubenswrapper[4879]: I1125 16:58:07.289880 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerID="59dfd7afaeb1d745aa47dfeb9c34afddae5f04470d646564fa5a637ae4a633f5" exitCode=0 Nov 25 16:58:07 crc kubenswrapper[4879]: I1125 16:58:07.290141 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerDied","Data":"59dfd7afaeb1d745aa47dfeb9c34afddae5f04470d646564fa5a637ae4a633f5"} Nov 25 16:58:08 crc kubenswrapper[4879]: I1125 16:58:08.302807 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerStarted","Data":"759c8a66793eb7d0977e443f5aa940c5fb0313329a7145f336ca8e1f929de1a1"} Nov 25 16:58:08 crc kubenswrapper[4879]: I1125 16:58:08.331424 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jxb97" podStartSLOduration=2.732580344 podStartE2EDuration="6.331406341s" podCreationTimestamp="2025-11-25 16:58:02 +0000 UTC" firstStartedPulling="2025-11-25 16:58:04.260323412 +0000 UTC m=+9175.863736483" lastFinishedPulling="2025-11-25 16:58:07.859149409 +0000 UTC m=+9179.462562480" observedRunningTime="2025-11-25 16:58:08.319644706 +0000 UTC m=+9179.923057807" watchObservedRunningTime="2025-11-25 16:58:08.331406341 +0000 UTC m=+9179.934819412" Nov 25 16:58:12 crc kubenswrapper[4879]: I1125 16:58:12.749876 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:12 crc kubenswrapper[4879]: I1125 16:58:12.750509 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:12 crc kubenswrapper[4879]: I1125 16:58:12.799979 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:13 crc kubenswrapper[4879]: I1125 16:58:13.410424 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:13 crc kubenswrapper[4879]: I1125 16:58:13.469706 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:15 crc kubenswrapper[4879]: I1125 16:58:15.380713 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jxb97" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="registry-server" containerID="cri-o://759c8a66793eb7d0977e443f5aa940c5fb0313329a7145f336ca8e1f929de1a1" gracePeriod=2 Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.396610 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerID="759c8a66793eb7d0977e443f5aa940c5fb0313329a7145f336ca8e1f929de1a1" exitCode=0 Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.396689 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerDied","Data":"759c8a66793eb7d0977e443f5aa940c5fb0313329a7145f336ca8e1f929de1a1"} Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.584279 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.751498 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content\") pod \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.751630 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6xtd\" (UniqueName: \"kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd\") pod \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.751866 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities\") pod \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\" (UID: \"6d6d5494-ccb7-44e6-9b76-82c625d3774c\") " Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.752709 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities" (OuterVolumeSpecName: "utilities") pod "6d6d5494-ccb7-44e6-9b76-82c625d3774c" (UID: "6d6d5494-ccb7-44e6-9b76-82c625d3774c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.761496 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd" (OuterVolumeSpecName: "kube-api-access-k6xtd") pod "6d6d5494-ccb7-44e6-9b76-82c625d3774c" (UID: "6d6d5494-ccb7-44e6-9b76-82c625d3774c"). InnerVolumeSpecName "kube-api-access-k6xtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.808273 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d6d5494-ccb7-44e6-9b76-82c625d3774c" (UID: "6d6d5494-ccb7-44e6-9b76-82c625d3774c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.854291 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6xtd\" (UniqueName: \"kubernetes.io/projected/6d6d5494-ccb7-44e6-9b76-82c625d3774c-kube-api-access-k6xtd\") on node \"crc\" DevicePath \"\"" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.854323 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 16:58:16 crc kubenswrapper[4879]: I1125 16:58:16.854336 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d6d5494-ccb7-44e6-9b76-82c625d3774c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.408477 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.408521 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.410266 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jxb97" event={"ID":"6d6d5494-ccb7-44e6-9b76-82c625d3774c","Type":"ContainerDied","Data":"07a35d2e42ccf7806eb7923712e713a86e3172e81cccac32d934d625ccfd5987"} Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.410304 4879 scope.go:117] "RemoveContainer" containerID="759c8a66793eb7d0977e443f5aa940c5fb0313329a7145f336ca8e1f929de1a1" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.410423 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jxb97" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.451523 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.457681 4879 scope.go:117] "RemoveContainer" containerID="59dfd7afaeb1d745aa47dfeb9c34afddae5f04470d646564fa5a637ae4a633f5" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.465968 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jxb97"] Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.482265 4879 scope.go:117] "RemoveContainer" containerID="7789a30040ac177351459035dde8d5985234272559476aa52939747e117e88b9" Nov 25 16:58:17 crc kubenswrapper[4879]: I1125 16:58:17.656556 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" path="/var/lib/kubelet/pods/6d6d5494-ccb7-44e6-9b76-82c625d3774c/volumes" Nov 25 16:58:47 crc kubenswrapper[4879]: I1125 16:58:47.409263 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:58:47 crc kubenswrapper[4879]: I1125 16:58:47.409801 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:59:17 crc kubenswrapper[4879]: I1125 16:59:17.409442 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 16:59:17 crc kubenswrapper[4879]: I1125 16:59:17.410170 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 16:59:17 crc kubenswrapper[4879]: I1125 16:59:17.410244 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 16:59:17 crc kubenswrapper[4879]: I1125 16:59:17.411342 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 16:59:17 crc kubenswrapper[4879]: I1125 16:59:17.411428 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" 
gracePeriod=600 Nov 25 16:59:17 crc kubenswrapper[4879]: E1125 16:59:17.594931 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:59:18 crc kubenswrapper[4879]: I1125 16:59:18.027979 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" exitCode=0 Nov 25 16:59:18 crc kubenswrapper[4879]: I1125 16:59:18.028085 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152"} Nov 25 16:59:18 crc kubenswrapper[4879]: I1125 16:59:18.028359 4879 scope.go:117] "RemoveContainer" containerID="2bc4b857534032b264f4368c19adf3cbfa1defcdc0a92919ffbebf827238f945" Nov 25 16:59:18 crc kubenswrapper[4879]: I1125 16:59:18.030196 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 16:59:18 crc kubenswrapper[4879]: E1125 16:59:18.030511 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:59:32 crc kubenswrapper[4879]: I1125 16:59:32.644968 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 16:59:32 crc kubenswrapper[4879]: E1125 16:59:32.645873 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:59:47 crc kubenswrapper[4879]: I1125 16:59:47.664006 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 16:59:47 crc kubenswrapper[4879]: E1125 16:59:47.665185 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 16:59:58 crc kubenswrapper[4879]: I1125 16:59:58.645858 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 16:59:58 crc kubenswrapper[4879]: E1125 16:59:58.647272 4879 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.164228 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws"] Nov 25 17:00:00 crc kubenswrapper[4879]: E1125 17:00:00.165186 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="extract-content" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.165204 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="extract-content" Nov 25 17:00:00 crc kubenswrapper[4879]: E1125 17:00:00.165236 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="registry-server" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.165243 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="registry-server" Nov 25 17:00:00 crc kubenswrapper[4879]: E1125 17:00:00.165288 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="extract-utilities" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.165298 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="extract-utilities" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.165559 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d6d5494-ccb7-44e6-9b76-82c625d3774c" containerName="registry-server" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.166480 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.168930 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.169266 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.176568 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws"] Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.265921 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.266138 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.266386 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgl4h\" (UniqueName: \"kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.369220 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.369510 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.369721 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgl4h\" (UniqueName: \"kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.370775 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume\") pod 
\"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.378867 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.388910 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgl4h\" (UniqueName: \"kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h\") pod \"collect-profiles-29401500-44rws\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.491815 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:00 crc kubenswrapper[4879]: I1125 17:00:00.977648 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws"] Nov 25 17:00:00 crc kubenswrapper[4879]: W1125 17:00:00.979225 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9934cff8_09ce_4d08_8bb0_60a2edae71eb.slice/crio-c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe WatchSource:0}: Error finding container c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe: Status 404 returned error can't find the container with id c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe Nov 25 17:00:01 crc kubenswrapper[4879]: I1125 17:00:01.470095 4879 generic.go:334] "Generic (PLEG): container finished" podID="9934cff8-09ce-4d08-8bb0-60a2edae71eb" containerID="d933f1f790292a5bb9480cc42c6d234ae12267e6c2cdfe11a989ca90591f534c" exitCode=0 Nov 25 17:00:01 crc kubenswrapper[4879]: I1125 17:00:01.470158 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" event={"ID":"9934cff8-09ce-4d08-8bb0-60a2edae71eb","Type":"ContainerDied","Data":"d933f1f790292a5bb9480cc42c6d234ae12267e6c2cdfe11a989ca90591f534c"} Nov 25 17:00:01 crc kubenswrapper[4879]: I1125 17:00:01.470421 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" event={"ID":"9934cff8-09ce-4d08-8bb0-60a2edae71eb","Type":"ContainerStarted","Data":"c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe"} Nov 25 17:00:02 crc kubenswrapper[4879]: I1125 17:00:02.894679 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.029307 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume\") pod \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.029398 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgl4h\" (UniqueName: \"kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h\") pod \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.029497 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume\") pod \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\" (UID: \"9934cff8-09ce-4d08-8bb0-60a2edae71eb\") " Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.030113 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume" (OuterVolumeSpecName: "config-volume") pod "9934cff8-09ce-4d08-8bb0-60a2edae71eb" (UID: "9934cff8-09ce-4d08-8bb0-60a2edae71eb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.030751 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9934cff8-09ce-4d08-8bb0-60a2edae71eb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.042404 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h" (OuterVolumeSpecName: "kube-api-access-rgl4h") pod "9934cff8-09ce-4d08-8bb0-60a2edae71eb" (UID: "9934cff8-09ce-4d08-8bb0-60a2edae71eb"). InnerVolumeSpecName "kube-api-access-rgl4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.065337 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9934cff8-09ce-4d08-8bb0-60a2edae71eb" (UID: "9934cff8-09ce-4d08-8bb0-60a2edae71eb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.132787 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgl4h\" (UniqueName: \"kubernetes.io/projected/9934cff8-09ce-4d08-8bb0-60a2edae71eb-kube-api-access-rgl4h\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.133023 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9934cff8-09ce-4d08-8bb0-60a2edae71eb-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.493816 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" event={"ID":"9934cff8-09ce-4d08-8bb0-60a2edae71eb","Type":"ContainerDied","Data":"c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe"} Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.493857 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c674413efde393700af1e122f78e3e115eac78228d4387cef71482be253a6ebe" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.493867 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401500-44rws" Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.975786 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"] Nov 25 17:00:03 crc kubenswrapper[4879]: I1125 17:00:03.986632 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401455-grdjz"] Nov 25 17:00:05 crc kubenswrapper[4879]: I1125 17:00:05.664884 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4f899e7-0e42-4f2b-878b-cf2c502f6bf4" path="/var/lib/kubelet/pods/a4f899e7-0e42-4f2b-878b-cf2c502f6bf4/volumes" Nov 25 17:00:09 crc kubenswrapper[4879]: I1125 17:00:09.653324 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:00:09 crc kubenswrapper[4879]: E1125 17:00:09.654066 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:00:21 crc kubenswrapper[4879]: I1125 17:00:21.644786 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:00:21 crc kubenswrapper[4879]: E1125 17:00:21.645729 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:00:32 crc kubenswrapper[4879]: I1125 17:00:32.644753 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:00:32 
crc kubenswrapper[4879]: E1125 17:00:32.645550 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.839567 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:40 crc kubenswrapper[4879]: E1125 17:00:40.840828 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9934cff8-09ce-4d08-8bb0-60a2edae71eb" containerName="collect-profiles" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.840848 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9934cff8-09ce-4d08-8bb0-60a2edae71eb" containerName="collect-profiles" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.841162 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9934cff8-09ce-4d08-8bb0-60a2edae71eb" containerName="collect-profiles" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.843264 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.855729 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.987397 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54hdf\" (UniqueName: \"kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.987763 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:40 crc kubenswrapper[4879]: I1125 17:00:40.988338 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.090031 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54hdf\" (UniqueName: \"kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.090099 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content\") pod \"redhat-marketplace-89bws\" 
(UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.090239 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.091046 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.091372 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.115168 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54hdf\" (UniqueName: \"kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf\") pod \"redhat-marketplace-89bws\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.176510 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.672946 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.934891 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerStarted","Data":"a5b87f3612d11ed6f3ede11688c104566040077b3543d56d19e4d619becd6ac8"} Nov 25 17:00:41 crc kubenswrapper[4879]: I1125 17:00:41.935532 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerStarted","Data":"7e715de3fdf02861c9a226422248258dc81e41db77bc9cf9f1ed8d4d76ddcce0"} Nov 25 17:00:42 crc kubenswrapper[4879]: I1125 17:00:42.946999 4879 generic.go:334] "Generic (PLEG): container finished" podID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerID="a5b87f3612d11ed6f3ede11688c104566040077b3543d56d19e4d619becd6ac8" exitCode=0 Nov 25 17:00:42 crc kubenswrapper[4879]: I1125 17:00:42.947051 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerDied","Data":"a5b87f3612d11ed6f3ede11688c104566040077b3543d56d19e4d619becd6ac8"} Nov 25 17:00:43 crc kubenswrapper[4879]: I1125 17:00:43.975406 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" 
event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerStarted","Data":"5e2fcafa21edaf5d2c8fcb1263407bbff59808bddd93cc18ff2f4bafedbdf443"} Nov 25 17:00:44 crc kubenswrapper[4879]: I1125 17:00:44.991414 4879 generic.go:334] "Generic (PLEG): container finished" podID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerID="5e2fcafa21edaf5d2c8fcb1263407bbff59808bddd93cc18ff2f4bafedbdf443" exitCode=0 Nov 25 17:00:44 crc kubenswrapper[4879]: I1125 17:00:44.991519 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerDied","Data":"5e2fcafa21edaf5d2c8fcb1263407bbff59808bddd93cc18ff2f4bafedbdf443"} Nov 25 17:00:45 crc kubenswrapper[4879]: I1125 17:00:45.644913 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:00:45 crc kubenswrapper[4879]: E1125 17:00:45.645431 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:00:46 crc kubenswrapper[4879]: I1125 17:00:46.004762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerStarted","Data":"24f5e2e4674752337415313b502d5363f20fc0bc2459fda07a7349a83256865d"} Nov 25 17:00:46 crc kubenswrapper[4879]: I1125 17:00:46.033794 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-89bws" podStartSLOduration=3.271629396 podStartE2EDuration="6.033774358s" podCreationTimestamp="2025-11-25 17:00:40 +0000 UTC" firstStartedPulling="2025-11-25 17:00:42.949561697 +0000 UTC m=+9334.552974768" lastFinishedPulling="2025-11-25 17:00:45.711706659 +0000 UTC m=+9337.315119730" observedRunningTime="2025-11-25 17:00:46.027081427 +0000 UTC m=+9337.630494498" watchObservedRunningTime="2025-11-25 17:00:46.033774358 +0000 UTC m=+9337.637187429" Nov 25 17:00:51 crc kubenswrapper[4879]: I1125 17:00:51.176773 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:51 crc kubenswrapper[4879]: I1125 17:00:51.177241 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:51 crc kubenswrapper[4879]: I1125 17:00:51.230664 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:52 crc kubenswrapper[4879]: I1125 17:00:52.128544 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:52 crc kubenswrapper[4879]: I1125 17:00:52.189690 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:54 crc kubenswrapper[4879]: I1125 17:00:54.090412 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-89bws" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="registry-server" 
containerID="cri-o://24f5e2e4674752337415313b502d5363f20fc0bc2459fda07a7349a83256865d" gracePeriod=2 Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.105323 4879 generic.go:334] "Generic (PLEG): container finished" podID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerID="24f5e2e4674752337415313b502d5363f20fc0bc2459fda07a7349a83256865d" exitCode=0 Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.105401 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerDied","Data":"24f5e2e4674752337415313b502d5363f20fc0bc2459fda07a7349a83256865d"} Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.105764 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89bws" event={"ID":"e512e26e-1e77-4d24-93f1-bb92f3b5db00","Type":"ContainerDied","Data":"7e715de3fdf02861c9a226422248258dc81e41db77bc9cf9f1ed8d4d76ddcce0"} Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.105785 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e715de3fdf02861c9a226422248258dc81e41db77bc9cf9f1ed8d4d76ddcce0" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.167972 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.226069 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities\") pod \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.226158 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content\") pod \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.226228 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54hdf\" (UniqueName: \"kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf\") pod \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\" (UID: \"e512e26e-1e77-4d24-93f1-bb92f3b5db00\") " Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.227277 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities" (OuterVolumeSpecName: "utilities") pod "e512e26e-1e77-4d24-93f1-bb92f3b5db00" (UID: "e512e26e-1e77-4d24-93f1-bb92f3b5db00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.234302 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf" (OuterVolumeSpecName: "kube-api-access-54hdf") pod "e512e26e-1e77-4d24-93f1-bb92f3b5db00" (UID: "e512e26e-1e77-4d24-93f1-bb92f3b5db00"). InnerVolumeSpecName "kube-api-access-54hdf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.244749 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e512e26e-1e77-4d24-93f1-bb92f3b5db00" (UID: "e512e26e-1e77-4d24-93f1-bb92f3b5db00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.328962 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.329005 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e512e26e-1e77-4d24-93f1-bb92f3b5db00-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:55 crc kubenswrapper[4879]: I1125 17:00:55.329022 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54hdf\" (UniqueName: \"kubernetes.io/projected/e512e26e-1e77-4d24-93f1-bb92f3b5db00-kube-api-access-54hdf\") on node \"crc\" DevicePath \"\"" Nov 25 17:00:56 crc kubenswrapper[4879]: I1125 17:00:56.120897 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89bws" Nov 25 17:00:56 crc kubenswrapper[4879]: I1125 17:00:56.147895 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:56 crc kubenswrapper[4879]: I1125 17:00:56.161336 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-89bws"] Nov 25 17:00:57 crc kubenswrapper[4879]: I1125 17:00:57.655542 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" path="/var/lib/kubelet/pods/e512e26e-1e77-4d24-93f1-bb92f3b5db00/volumes" Nov 25 17:00:58 crc kubenswrapper[4879]: I1125 17:00:58.537304 4879 scope.go:117] "RemoveContainer" containerID="882549109ed4c91ea920d51d52f54a9ff25e356295f39b079a3521b97fa3e255" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.152011 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29401501-fgvcs"] Nov 25 17:01:00 crc kubenswrapper[4879]: E1125 17:01:00.152719 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="extract-utilities" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.152732 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="extract-utilities" Nov 25 17:01:00 crc kubenswrapper[4879]: E1125 17:01:00.152790 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="extract-content" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.152796 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="extract-content" Nov 25 17:01:00 crc kubenswrapper[4879]: E1125 17:01:00.152806 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="registry-server" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.152812 4879 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="registry-server" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.153360 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="e512e26e-1e77-4d24-93f1-bb92f3b5db00" containerName="registry-server" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.154231 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.165729 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29401501-fgvcs"] Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.248451 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.248543 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.248590 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.248629 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv58q\" (UniqueName: \"kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.350851 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.350911 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv58q\" (UniqueName: \"kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.351060 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.351102 4879 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.357304 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.357995 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.358268 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.366758 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv58q\" (UniqueName: \"kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q\") pod \"keystone-cron-29401501-fgvcs\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.480504 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.645201 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:01:00 crc kubenswrapper[4879]: E1125 17:01:00.645481 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:01:00 crc kubenswrapper[4879]: W1125 17:01:00.945339 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeea642a0_9caf_483e_865b_c61b1300d15d.slice/crio-33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88 WatchSource:0}: Error finding container 33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88: Status 404 returned error can't find the container with id 33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88 Nov 25 17:01:00 crc kubenswrapper[4879]: I1125 17:01:00.945523 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29401501-fgvcs"] Nov 25 17:01:01 crc kubenswrapper[4879]: I1125 17:01:01.177734 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401501-fgvcs" event={"ID":"eea642a0-9caf-483e-865b-c61b1300d15d","Type":"ContainerStarted","Data":"13d2e0a529b7344a34b1f5858810d657f0dd7799aab9c302dd6e6552dd5c7706"} Nov 25 17:01:01 crc kubenswrapper[4879]: I1125 17:01:01.178348 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401501-fgvcs" event={"ID":"eea642a0-9caf-483e-865b-c61b1300d15d","Type":"ContainerStarted","Data":"33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88"} Nov 25 17:01:01 crc kubenswrapper[4879]: I1125 17:01:01.196895 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29401501-fgvcs" podStartSLOduration=1.196874457 podStartE2EDuration="1.196874457s" podCreationTimestamp="2025-11-25 17:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 17:01:01.193207538 +0000 UTC m=+9352.796620609" watchObservedRunningTime="2025-11-25 17:01:01.196874457 +0000 UTC m=+9352.800287528" Nov 25 17:01:05 crc kubenswrapper[4879]: I1125 17:01:05.221875 4879 generic.go:334] "Generic (PLEG): container finished" podID="eea642a0-9caf-483e-865b-c61b1300d15d" containerID="13d2e0a529b7344a34b1f5858810d657f0dd7799aab9c302dd6e6552dd5c7706" exitCode=0 Nov 25 17:01:05 crc kubenswrapper[4879]: I1125 17:01:05.221965 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401501-fgvcs" event={"ID":"eea642a0-9caf-483e-865b-c61b1300d15d","Type":"ContainerDied","Data":"13d2e0a529b7344a34b1f5858810d657f0dd7799aab9c302dd6e6552dd5c7706"} Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.077967 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.204553 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vv58q\" (UniqueName: \"kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q\") pod \"eea642a0-9caf-483e-865b-c61b1300d15d\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.204692 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys\") pod \"eea642a0-9caf-483e-865b-c61b1300d15d\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.204764 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle\") pod \"eea642a0-9caf-483e-865b-c61b1300d15d\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.204846 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data\") pod \"eea642a0-9caf-483e-865b-c61b1300d15d\" (UID: \"eea642a0-9caf-483e-865b-c61b1300d15d\") " Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.211582 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eea642a0-9caf-483e-865b-c61b1300d15d" (UID: "eea642a0-9caf-483e-865b-c61b1300d15d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.212386 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q" (OuterVolumeSpecName: "kube-api-access-vv58q") pod "eea642a0-9caf-483e-865b-c61b1300d15d" (UID: "eea642a0-9caf-483e-865b-c61b1300d15d"). InnerVolumeSpecName "kube-api-access-vv58q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.249654 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29401501-fgvcs" event={"ID":"eea642a0-9caf-483e-865b-c61b1300d15d","Type":"ContainerDied","Data":"33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88"} Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.249696 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33bd789e3c222f4de79731f84e2d1d810a0aba90d312d1376504db0724edbc88" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.249753 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29401501-fgvcs" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.265682 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eea642a0-9caf-483e-865b-c61b1300d15d" (UID: "eea642a0-9caf-483e-865b-c61b1300d15d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.303261 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data" (OuterVolumeSpecName: "config-data") pod "eea642a0-9caf-483e-865b-c61b1300d15d" (UID: "eea642a0-9caf-483e-865b-c61b1300d15d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.309871 4879 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.309909 4879 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.309924 4879 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea642a0-9caf-483e-865b-c61b1300d15d-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:07 crc kubenswrapper[4879]: I1125 17:01:07.310015 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vv58q\" (UniqueName: \"kubernetes.io/projected/eea642a0-9caf-483e-865b-c61b1300d15d-kube-api-access-vv58q\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:15 crc kubenswrapper[4879]: I1125 17:01:15.644783 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:01:15 crc kubenswrapper[4879]: E1125 17:01:15.645695 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:01:26 crc kubenswrapper[4879]: I1125 17:01:26.645309 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:01:26 crc kubenswrapper[4879]: E1125 17:01:26.646348 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:01:39 crc kubenswrapper[4879]: I1125 17:01:39.653745 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:01:39 crc kubenswrapper[4879]: E1125 17:01:39.654575 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:01:41 crc kubenswrapper[4879]: I1125 17:01:41.600927 4879 generic.go:334] "Generic (PLEG): container finished" podID="6d1a0044-8107-4346-a960-be9ee93cc90c" containerID="0b70643095ad8d82203db9ea3acf7781481c0d1a45ec2f897112424d471b708a" exitCode=0 Nov 25 17:01:41 crc kubenswrapper[4879]: I1125 17:01:41.601022 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" event={"ID":"6d1a0044-8107-4346-a960-be9ee93cc90c","Type":"ContainerDied","Data":"0b70643095ad8d82203db9ea3acf7781481c0d1a45ec2f897112424d471b708a"} Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.076674 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221044 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221334 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221390 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221463 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221489 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221963 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.221995 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.222031 4879 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.222104 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.222172 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42nlr\" (UniqueName: \"kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.222212 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0\") pod \"6d1a0044-8107-4346-a960-be9ee93cc90c\" (UID: \"6d1a0044-8107-4346-a960-be9ee93cc90c\") " Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.226506 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph" (OuterVolumeSpecName: "ceph") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.228561 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.230076 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr" (OuterVolumeSpecName: "kube-api-access-42nlr") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "kube-api-access-42nlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.253319 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-cells-global-config-1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.262242 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.263883 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory" (OuterVolumeSpecName: "inventory") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.264052 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.264348 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.265399 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.265917 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.267361 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "6d1a0044-8107-4346-a960-be9ee93cc90c" (UID: "6d1a0044-8107-4346-a960-be9ee93cc90c"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325856 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325926 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42nlr\" (UniqueName: \"kubernetes.io/projected/6d1a0044-8107-4346-a960-be9ee93cc90c-kube-api-access-42nlr\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325938 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325950 4879 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325959 4879 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-inventory\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325968 4879 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325976 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325985 4879 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.325993 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.326002 4879 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/6d1a0044-8107-4346-a960-be9ee93cc90c-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.326010 4879 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/6d1a0044-8107-4346-a960-be9ee93cc90c-ceph\") on node \"crc\" DevicePath \"\"" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.622008 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" event={"ID":"6d1a0044-8107-4346-a960-be9ee93cc90c","Type":"ContainerDied","Data":"e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7"} Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.622046 4879 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2b34928f7834b8d53f160bde9f08cb470663882eca121ec0568a42d6f9a8bf7" Nov 25 17:01:43 crc kubenswrapper[4879]: I1125 17:01:43.622077 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29" Nov 25 17:01:54 crc kubenswrapper[4879]: I1125 17:01:54.645690 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:01:54 crc kubenswrapper[4879]: E1125 17:01:54.666049 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:02:07 crc kubenswrapper[4879]: I1125 17:02:07.649344 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:02:07 crc kubenswrapper[4879]: E1125 17:02:07.650110 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:02:20 crc kubenswrapper[4879]: I1125 17:02:20.645444 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:02:20 crc kubenswrapper[4879]: E1125 17:02:20.646210 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:02:31 crc kubenswrapper[4879]: I1125 17:02:31.645201 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:02:31 crc kubenswrapper[4879]: E1125 17:02:31.645998 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:02:44 crc kubenswrapper[4879]: I1125 17:02:44.645327 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:02:44 crc kubenswrapper[4879]: E1125 17:02:44.645983 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:02:59 crc kubenswrapper[4879]: I1125 17:02:59.644765 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:02:59 crc kubenswrapper[4879]: E1125 17:02:59.645675 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:03:13 crc kubenswrapper[4879]: I1125 17:03:13.645282 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:03:13 crc kubenswrapper[4879]: E1125 17:03:13.646048 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:03:24 crc kubenswrapper[4879]: I1125 17:03:24.645096 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:03:24 crc kubenswrapper[4879]: E1125 17:03:24.646993 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:03:38 crc kubenswrapper[4879]: I1125 17:03:38.644692 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:03:38 crc kubenswrapper[4879]: E1125 17:03:38.645432 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:03:49 crc kubenswrapper[4879]: I1125 17:03:49.652546 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:03:49 crc kubenswrapper[4879]: E1125 17:03:49.653333 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:04:04 crc kubenswrapper[4879]: I1125 17:04:04.645246 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:04:04 crc kubenswrapper[4879]: E1125 17:04:04.646179 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.893458 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbjbv/must-gather-tw82t"] Nov 25 17:04:12 crc kubenswrapper[4879]: E1125 17:04:12.894662 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea642a0-9caf-483e-865b-c61b1300d15d" containerName="keystone-cron" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.894683 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea642a0-9caf-483e-865b-c61b1300d15d" containerName="keystone-cron" Nov 25 17:04:12 crc kubenswrapper[4879]: E1125 17:04:12.894698 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d1a0044-8107-4346-a960-be9ee93cc90c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.894707 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d1a0044-8107-4346-a960-be9ee93cc90c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.894994 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d1a0044-8107-4346-a960-be9ee93cc90c" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.895041 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea642a0-9caf-483e-865b-c61b1300d15d" containerName="keystone-cron" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.896617 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.902052 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wbjbv"/"default-dockercfg-rlt66" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.902370 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wbjbv"/"openshift-service-ca.crt" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.902452 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wbjbv"/"kube-root-ca.crt" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.919157 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wbjbv/must-gather-tw82t"] Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.991185 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dthsh\" (UniqueName: \"kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:12 crc kubenswrapper[4879]: I1125 17:04:12.991248 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.093817 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dthsh\" (UniqueName: \"kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.093886 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.094423 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.112305 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dthsh\" (UniqueName: \"kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh\") pod \"must-gather-tw82t\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.220828 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.685658 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wbjbv/must-gather-tw82t"] Nov 25 17:04:13 crc kubenswrapper[4879]: I1125 17:04:13.689988 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 17:04:14 crc kubenswrapper[4879]: I1125 17:04:14.235638 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/must-gather-tw82t" event={"ID":"9b12d2af-292c-4c47-8b96-7b9ef3bda01a","Type":"ContainerStarted","Data":"b88d7d8a61ff2df50484e32ffe06c0cb61edd7921a3fd36b464e34b2748ed3b0"} Nov 25 17:04:15 crc kubenswrapper[4879]: I1125 17:04:15.645730 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:04:15 crc kubenswrapper[4879]: E1125 17:04:15.646334 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:04:20 crc kubenswrapper[4879]: I1125 17:04:20.348680 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/must-gather-tw82t" event={"ID":"9b12d2af-292c-4c47-8b96-7b9ef3bda01a","Type":"ContainerStarted","Data":"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369"} Nov 25 17:04:21 crc kubenswrapper[4879]: I1125 17:04:21.366849 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/must-gather-tw82t" event={"ID":"9b12d2af-292c-4c47-8b96-7b9ef3bda01a","Type":"ContainerStarted","Data":"eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0"} Nov 25 17:04:21 crc kubenswrapper[4879]: I1125 17:04:21.384209 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wbjbv/must-gather-tw82t" podStartSLOduration=3.087945208 podStartE2EDuration="9.384191795s" podCreationTimestamp="2025-11-25 17:04:12 +0000 UTC" firstStartedPulling="2025-11-25 17:04:13.689699317 +0000 UTC m=+9545.293112388" lastFinishedPulling="2025-11-25 17:04:19.985945904 +0000 UTC m=+9551.589358975" observedRunningTime="2025-11-25 17:04:21.380846205 +0000 UTC m=+9552.984259276" watchObservedRunningTime="2025-11-25 17:04:21.384191795 +0000 UTC m=+9552.987604866" Nov 25 17:04:24 crc kubenswrapper[4879]: E1125 17:04:24.203795 4879 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.190:35466->38.102.83.190:33827: read tcp 38.102.83.190:35466->38.102.83.190:33827: read: connection reset by peer Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.049950 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-7cx6q"] Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.052410 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.188677 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.188942 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvnpn\" (UniqueName: \"kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.291893 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.292570 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvnpn\" (UniqueName: \"kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.292096 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.314194 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvnpn\" (UniqueName: \"kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn\") pod \"crc-debug-7cx6q\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:25 crc kubenswrapper[4879]: I1125 17:04:25.378897 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:04:26 crc kubenswrapper[4879]: I1125 17:04:26.417431 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" event={"ID":"876ec766-c71d-4898-a272-a21da2a61c7d","Type":"ContainerStarted","Data":"be0663700a6065e3203bd8bb35153ab8af4d4fe968633107f6e145dbcce6dcd3"} Nov 25 17:04:29 crc kubenswrapper[4879]: I1125 17:04:29.652390 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:04:30 crc kubenswrapper[4879]: I1125 17:04:30.499798 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39"} Nov 25 17:04:43 crc kubenswrapper[4879]: E1125 17:04:43.072005 4879 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296" Nov 25 17:04:43 crc kubenswrapper[4879]: E1125 17:04:43.075968 4879 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wvnpn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
crc-debug-7cx6q_openshift-must-gather-wbjbv(876ec766-c71d-4898-a272-a21da2a61c7d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 25 17:04:43 crc kubenswrapper[4879]: E1125 17:04:43.077497 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" Nov 25 17:04:43 crc kubenswrapper[4879]: E1125 17:04:43.641899 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" Nov 25 17:04:57 crc kubenswrapper[4879]: I1125 17:04:57.777183 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" event={"ID":"876ec766-c71d-4898-a272-a21da2a61c7d","Type":"ContainerStarted","Data":"6568525e8b2d6e68e9890d4607d86e1c72542134ec5bc058f47cc26f60d3fbc6"} Nov 25 17:04:57 crc kubenswrapper[4879]: I1125 17:04:57.798271 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" podStartSLOduration=1.071118054 podStartE2EDuration="32.798193431s" podCreationTimestamp="2025-11-25 17:04:25 +0000 UTC" firstStartedPulling="2025-11-25 17:04:25.426702809 +0000 UTC m=+9557.030115880" lastFinishedPulling="2025-11-25 17:04:57.153778196 +0000 UTC m=+9588.757191257" observedRunningTime="2025-11-25 17:04:57.789910357 +0000 UTC m=+9589.393323448" watchObservedRunningTime="2025-11-25 17:04:57.798193431 +0000 UTC m=+9589.401606502" Nov 25 17:05:22 crc kubenswrapper[4879]: I1125 17:05:22.346640 4879 generic.go:334] "Generic (PLEG): container finished" podID="876ec766-c71d-4898-a272-a21da2a61c7d" containerID="6568525e8b2d6e68e9890d4607d86e1c72542134ec5bc058f47cc26f60d3fbc6" exitCode=0 Nov 25 17:05:22 crc kubenswrapper[4879]: I1125 17:05:22.346719 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" event={"ID":"876ec766-c71d-4898-a272-a21da2a61c7d","Type":"ContainerDied","Data":"6568525e8b2d6e68e9890d4607d86e1c72542134ec5bc058f47cc26f60d3fbc6"} Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.489941 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.532547 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-7cx6q"] Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.542688 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-7cx6q"] Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.563270 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host\") pod \"876ec766-c71d-4898-a272-a21da2a61c7d\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.563401 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host" (OuterVolumeSpecName: "host") pod "876ec766-c71d-4898-a272-a21da2a61c7d" (UID: "876ec766-c71d-4898-a272-a21da2a61c7d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.563544 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvnpn\" (UniqueName: \"kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn\") pod \"876ec766-c71d-4898-a272-a21da2a61c7d\" (UID: \"876ec766-c71d-4898-a272-a21da2a61c7d\") " Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.564045 4879 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/876ec766-c71d-4898-a272-a21da2a61c7d-host\") on node \"crc\" DevicePath \"\"" Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.570400 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn" (OuterVolumeSpecName: "kube-api-access-wvnpn") pod "876ec766-c71d-4898-a272-a21da2a61c7d" (UID: "876ec766-c71d-4898-a272-a21da2a61c7d"). InnerVolumeSpecName "kube-api-access-wvnpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.658955 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" path="/var/lib/kubelet/pods/876ec766-c71d-4898-a272-a21da2a61c7d/volumes" Nov 25 17:05:23 crc kubenswrapper[4879]: I1125 17:05:23.667676 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvnpn\" (UniqueName: \"kubernetes.io/projected/876ec766-c71d-4898-a272-a21da2a61c7d-kube-api-access-wvnpn\") on node \"crc\" DevicePath \"\"" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.374690 4879 scope.go:117] "RemoveContainer" containerID="6568525e8b2d6e68e9890d4607d86e1c72542134ec5bc058f47cc26f60d3fbc6" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.374757 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-7cx6q" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.720372 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-xsgp5"] Nov 25 17:05:24 crc kubenswrapper[4879]: E1125 17:05:24.720956 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" containerName="container-00" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.720971 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" containerName="container-00" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.721273 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="876ec766-c71d-4898-a272-a21da2a61c7d" containerName="container-00" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.722247 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.793623 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.793801 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x25dq\" (UniqueName: \"kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.897270 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.897428 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x25dq\" (UniqueName: \"kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.897447 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:24 crc kubenswrapper[4879]: I1125 17:05:24.917525 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x25dq\" (UniqueName: \"kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq\") pod \"crc-debug-xsgp5\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.041603 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.386020 4879 generic.go:334] "Generic (PLEG): container finished" podID="d7b47f9a-8cce-4b65-a21a-b1b761f8febe" containerID="0f65457247b5452dbee9b5c5f27266d119fd9958fad463a116ee9dc11241a82f" exitCode=1 Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.386150 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" event={"ID":"d7b47f9a-8cce-4b65-a21a-b1b761f8febe","Type":"ContainerDied","Data":"0f65457247b5452dbee9b5c5f27266d119fd9958fad463a116ee9dc11241a82f"} Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.386184 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" event={"ID":"d7b47f9a-8cce-4b65-a21a-b1b761f8febe","Type":"ContainerStarted","Data":"f8357740c312a08f7c2c233db0bfeb5c75fc39767d3e721ff07cee9afbfd47e2"} Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.429077 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-xsgp5"] Nov 25 17:05:25 crc kubenswrapper[4879]: I1125 17:05:25.439202 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbjbv/crc-debug-xsgp5"] Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.508242 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.534760 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host\") pod \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.534810 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x25dq\" (UniqueName: \"kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq\") pod \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\" (UID: \"d7b47f9a-8cce-4b65-a21a-b1b761f8febe\") " Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.535980 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host" (OuterVolumeSpecName: "host") pod "d7b47f9a-8cce-4b65-a21a-b1b761f8febe" (UID: "d7b47f9a-8cce-4b65-a21a-b1b761f8febe"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.543388 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq" (OuterVolumeSpecName: "kube-api-access-x25dq") pod "d7b47f9a-8cce-4b65-a21a-b1b761f8febe" (UID: "d7b47f9a-8cce-4b65-a21a-b1b761f8febe"). InnerVolumeSpecName "kube-api-access-x25dq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.637457 4879 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-host\") on node \"crc\" DevicePath \"\"" Nov 25 17:05:26 crc kubenswrapper[4879]: I1125 17:05:26.637497 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x25dq\" (UniqueName: \"kubernetes.io/projected/d7b47f9a-8cce-4b65-a21a-b1b761f8febe-kube-api-access-x25dq\") on node \"crc\" DevicePath \"\"" Nov 25 17:05:27 crc kubenswrapper[4879]: I1125 17:05:27.408420 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wbjbv/crc-debug-xsgp5" Nov 25 17:05:27 crc kubenswrapper[4879]: I1125 17:05:27.408462 4879 scope.go:117] "RemoveContainer" containerID="0f65457247b5452dbee9b5c5f27266d119fd9958fad463a116ee9dc11241a82f" Nov 25 17:05:27 crc kubenswrapper[4879]: I1125 17:05:27.663732 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7b47f9a-8cce-4b65-a21a-b1b761f8febe" path="/var/lib/kubelet/pods/d7b47f9a-8cce-4b65-a21a-b1b761f8febe/volumes" Nov 25 17:06:47 crc kubenswrapper[4879]: I1125 17:06:47.409530 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:06:47 crc kubenswrapper[4879]: I1125 17:06:47.410208 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:06:58 crc kubenswrapper[4879]: I1125 17:06:58.750643 4879 scope.go:117] "RemoveContainer" containerID="5e2fcafa21edaf5d2c8fcb1263407bbff59808bddd93cc18ff2f4bafedbdf443" Nov 25 17:06:58 crc kubenswrapper[4879]: I1125 17:06:58.788270 4879 scope.go:117] "RemoveContainer" containerID="24f5e2e4674752337415313b502d5363f20fc0bc2459fda07a7349a83256865d" Nov 25 17:06:58 crc kubenswrapper[4879]: I1125 17:06:58.849808 4879 scope.go:117] "RemoveContainer" containerID="a5b87f3612d11ed6f3ede11688c104566040077b3543d56d19e4d619becd6ac8" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.338736 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:06 crc kubenswrapper[4879]: E1125 17:07:06.341654 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7b47f9a-8cce-4b65-a21a-b1b761f8febe" containerName="container-00" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.341785 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7b47f9a-8cce-4b65-a21a-b1b761f8febe" containerName="container-00" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.342178 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7b47f9a-8cce-4b65-a21a-b1b761f8febe" containerName="container-00" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.344514 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.356738 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.444096 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.444367 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmp6v\" (UniqueName: \"kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.444674 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.546472 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.546683 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.546763 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmp6v\" (UniqueName: \"kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.547505 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.547756 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.574644 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fmp6v\" (UniqueName: \"kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v\") pod \"certified-operators-4bhtk\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:06 crc kubenswrapper[4879]: I1125 17:07:06.677365 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:07 crc kubenswrapper[4879]: I1125 17:07:07.220841 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:07 crc kubenswrapper[4879]: I1125 17:07:07.491340 4879 generic.go:334] "Generic (PLEG): container finished" podID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerID="1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd" exitCode=0 Nov 25 17:07:07 crc kubenswrapper[4879]: I1125 17:07:07.493017 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerDied","Data":"1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd"} Nov 25 17:07:07 crc kubenswrapper[4879]: I1125 17:07:07.493144 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerStarted","Data":"154236423cbfd5a7bb667d3038cd7b9c1086a08cf805afa137b4ea440074adeb"} Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.511391 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerStarted","Data":"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f"} Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.757415 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.761169 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.790262 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.801328 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.801420 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-879kf\" (UniqueName: \"kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.801498 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.905159 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.905257 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-879kf\" (UniqueName: \"kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.905343 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.906156 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.906241 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:08 crc kubenswrapper[4879]: I1125 17:07:08.926377 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-879kf\" (UniqueName: \"kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf\") pod \"redhat-operators-d8rzr\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:09 crc kubenswrapper[4879]: I1125 17:07:09.110042 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:09 crc kubenswrapper[4879]: I1125 17:07:09.660942 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:10 crc kubenswrapper[4879]: I1125 17:07:10.535775 4879 generic.go:334] "Generic (PLEG): container finished" podID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerID="6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077" exitCode=0 Nov 25 17:07:10 crc kubenswrapper[4879]: I1125 17:07:10.535986 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerDied","Data":"6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077"} Nov 25 17:07:10 crc kubenswrapper[4879]: I1125 17:07:10.536185 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerStarted","Data":"aee23303e1bd3c6362c71de01fa118be709335830579a95439cbdaaecfee0c58"} Nov 25 17:07:10 crc kubenswrapper[4879]: I1125 17:07:10.540616 4879 generic.go:334] "Generic (PLEG): container finished" podID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerID="d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f" exitCode=0 Nov 25 17:07:10 crc kubenswrapper[4879]: I1125 17:07:10.540656 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerDied","Data":"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f"} Nov 25 17:07:12 crc kubenswrapper[4879]: I1125 17:07:12.579030 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerStarted","Data":"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10"} Nov 25 17:07:12 crc kubenswrapper[4879]: I1125 17:07:12.583516 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerStarted","Data":"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0"} Nov 25 17:07:12 crc kubenswrapper[4879]: I1125 17:07:12.634933 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4bhtk" podStartSLOduration=3.07346 podStartE2EDuration="6.634909316s" podCreationTimestamp="2025-11-25 17:07:06 +0000 UTC" firstStartedPulling="2025-11-25 17:07:07.496008154 +0000 UTC m=+9719.099421225" lastFinishedPulling="2025-11-25 17:07:11.05745743 +0000 UTC m=+9722.660870541" observedRunningTime="2025-11-25 17:07:12.620712852 +0000 UTC m=+9724.224125973" watchObservedRunningTime="2025-11-25 17:07:12.634909316 +0000 UTC m=+9724.238322397" Nov 25 17:07:16 crc kubenswrapper[4879]: I1125 17:07:16.677975 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4bhtk" 
Nov 25 17:07:16 crc kubenswrapper[4879]: I1125 17:07:16.678925 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:16 crc kubenswrapper[4879]: I1125 17:07:16.736580 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:17 crc kubenswrapper[4879]: I1125 17:07:17.408406 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:07:17 crc kubenswrapper[4879]: I1125 17:07:17.408756 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:07:17 crc kubenswrapper[4879]: I1125 17:07:17.658769 4879 generic.go:334] "Generic (PLEG): container finished" podID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerID="08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10" exitCode=0 Nov 25 17:07:17 crc kubenswrapper[4879]: I1125 17:07:17.658900 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerDied","Data":"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10"} Nov 25 17:07:17 crc kubenswrapper[4879]: I1125 17:07:17.718103 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:18 crc kubenswrapper[4879]: I1125 17:07:18.331219 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:18 crc kubenswrapper[4879]: I1125 17:07:18.671738 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerStarted","Data":"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf"} Nov 25 17:07:19 crc kubenswrapper[4879]: I1125 17:07:19.110315 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:19 crc kubenswrapper[4879]: I1125 17:07:19.110386 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:19 crc kubenswrapper[4879]: I1125 17:07:19.681408 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4bhtk" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="registry-server" containerID="cri-o://b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0" gracePeriod=2 Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.163521 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d8rzr" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="registry-server" probeResult="failure" output=< Nov 25 17:07:20 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 17:07:20 crc kubenswrapper[4879]: > Nov 
25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.184907 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.221777 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d8rzr" podStartSLOduration=4.686159482 podStartE2EDuration="12.221749433s" podCreationTimestamp="2025-11-25 17:07:08 +0000 UTC" firstStartedPulling="2025-11-25 17:07:10.538054865 +0000 UTC m=+9722.141467936" lastFinishedPulling="2025-11-25 17:07:18.073644796 +0000 UTC m=+9729.677057887" observedRunningTime="2025-11-25 17:07:18.707867996 +0000 UTC m=+9730.311281057" watchObservedRunningTime="2025-11-25 17:07:20.221749433 +0000 UTC m=+9731.825162524" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.292773 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities\") pod \"52a44299-fdd8-4cdc-9545-9622b57c1196\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.293053 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content\") pod \"52a44299-fdd8-4cdc-9545-9622b57c1196\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.293269 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmp6v\" (UniqueName: \"kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v\") pod \"52a44299-fdd8-4cdc-9545-9622b57c1196\" (UID: \"52a44299-fdd8-4cdc-9545-9622b57c1196\") " Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.293577 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities" (OuterVolumeSpecName: "utilities") pod "52a44299-fdd8-4cdc-9545-9622b57c1196" (UID: "52a44299-fdd8-4cdc-9545-9622b57c1196"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.293993 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.301264 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v" (OuterVolumeSpecName: "kube-api-access-fmp6v") pod "52a44299-fdd8-4cdc-9545-9622b57c1196" (UID: "52a44299-fdd8-4cdc-9545-9622b57c1196"). InnerVolumeSpecName "kube-api-access-fmp6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.341100 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52a44299-fdd8-4cdc-9545-9622b57c1196" (UID: "52a44299-fdd8-4cdc-9545-9622b57c1196"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.395917 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52a44299-fdd8-4cdc-9545-9622b57c1196-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.396440 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmp6v\" (UniqueName: \"kubernetes.io/projected/52a44299-fdd8-4cdc-9545-9622b57c1196-kube-api-access-fmp6v\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.694974 4879 generic.go:334] "Generic (PLEG): container finished" podID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerID="b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0" exitCode=0 Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.695026 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerDied","Data":"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0"} Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.695051 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4bhtk" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.695075 4879 scope.go:117] "RemoveContainer" containerID="b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.695058 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bhtk" event={"ID":"52a44299-fdd8-4cdc-9545-9622b57c1196","Type":"ContainerDied","Data":"154236423cbfd5a7bb667d3038cd7b9c1086a08cf805afa137b4ea440074adeb"} Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.721375 4879 scope.go:117] "RemoveContainer" containerID="d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.734406 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.748229 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4bhtk"] Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.761779 4879 scope.go:117] "RemoveContainer" containerID="1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.796735 4879 scope.go:117] "RemoveContainer" containerID="b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0" Nov 25 17:07:20 crc kubenswrapper[4879]: E1125 17:07:20.797222 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0\": container with ID starting with b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0 not found: ID does not exist" containerID="b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.797273 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0"} err="failed to get container status 
\"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0\": rpc error: code = NotFound desc = could not find container \"b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0\": container with ID starting with b09741f527772d2673ed52938d47c859edeedd95fecea8276ab072034634f2d0 not found: ID does not exist" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.797313 4879 scope.go:117] "RemoveContainer" containerID="d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f" Nov 25 17:07:20 crc kubenswrapper[4879]: E1125 17:07:20.797702 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f\": container with ID starting with d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f not found: ID does not exist" containerID="d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.797730 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f"} err="failed to get container status \"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f\": rpc error: code = NotFound desc = could not find container \"d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f\": container with ID starting with d6c8f4fd5228ade825982d59c40c4d4b0800e851dc21194f6f4d261a78077f0f not found: ID does not exist" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.797749 4879 scope.go:117] "RemoveContainer" containerID="1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd" Nov 25 17:07:20 crc kubenswrapper[4879]: E1125 17:07:20.798109 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd\": container with ID starting with 1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd not found: ID does not exist" containerID="1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd" Nov 25 17:07:20 crc kubenswrapper[4879]: I1125 17:07:20.798159 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd"} err="failed to get container status \"1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd\": rpc error: code = NotFound desc = could not find container \"1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd\": container with ID starting with 1343010781b66a33eeaa3e20d504d7bd0be1146de5a09eb145231c78d80e6dfd not found: ID does not exist" Nov 25 17:07:21 crc kubenswrapper[4879]: I1125 17:07:21.663084 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" path="/var/lib/kubelet/pods/52a44299-fdd8-4cdc-9545-9622b57c1196/volumes" Nov 25 17:07:29 crc kubenswrapper[4879]: I1125 17:07:29.177439 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:29 crc kubenswrapper[4879]: I1125 17:07:29.230196 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:31 crc kubenswrapper[4879]: I1125 17:07:31.727691 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:31 crc kubenswrapper[4879]: I1125 17:07:31.728491 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d8rzr" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="registry-server" containerID="cri-o://6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf" gracePeriod=2 Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.181697 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.291659 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities\") pod \"8ae79c92-2508-4f50-b385-7b742a342fb9\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.291846 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content\") pod \"8ae79c92-2508-4f50-b385-7b742a342fb9\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.291902 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-879kf\" (UniqueName: \"kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf\") pod \"8ae79c92-2508-4f50-b385-7b742a342fb9\" (UID: \"8ae79c92-2508-4f50-b385-7b742a342fb9\") " Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.292889 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities" (OuterVolumeSpecName: "utilities") pod "8ae79c92-2508-4f50-b385-7b742a342fb9" (UID: "8ae79c92-2508-4f50-b385-7b742a342fb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.297033 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf" (OuterVolumeSpecName: "kube-api-access-879kf") pod "8ae79c92-2508-4f50-b385-7b742a342fb9" (UID: "8ae79c92-2508-4f50-b385-7b742a342fb9"). InnerVolumeSpecName "kube-api-access-879kf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.370239 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ae79c92-2508-4f50-b385-7b742a342fb9" (UID: "8ae79c92-2508-4f50-b385-7b742a342fb9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.394766 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.394805 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-879kf\" (UniqueName: \"kubernetes.io/projected/8ae79c92-2508-4f50-b385-7b742a342fb9-kube-api-access-879kf\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.394818 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ae79c92-2508-4f50-b385-7b742a342fb9-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.860652 4879 generic.go:334] "Generic (PLEG): container finished" podID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerID="6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf" exitCode=0 Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.860708 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d8rzr" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.860733 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerDied","Data":"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf"} Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.861112 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8rzr" event={"ID":"8ae79c92-2508-4f50-b385-7b742a342fb9","Type":"ContainerDied","Data":"aee23303e1bd3c6362c71de01fa118be709335830579a95439cbdaaecfee0c58"} Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.861152 4879 scope.go:117] "RemoveContainer" containerID="6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.891555 4879 scope.go:117] "RemoveContainer" containerID="08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.927233 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.935110 4879 scope.go:117] "RemoveContainer" containerID="6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.941660 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d8rzr"] Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.986549 4879 scope.go:117] "RemoveContainer" containerID="6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf" Nov 25 17:07:32 crc kubenswrapper[4879]: E1125 17:07:32.987181 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf\": container with ID starting with 6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf not found: ID does not exist" containerID="6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.987236 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf"} err="failed to get container status \"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf\": rpc error: code = NotFound desc = could not find container \"6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf\": container with ID starting with 6eae33d43e7d2e10d355fe3f39c041ea99a89db1f7ffcb34895a94a5eda34eaf not found: ID does not exist" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.987269 4879 scope.go:117] "RemoveContainer" containerID="08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10" Nov 25 17:07:32 crc kubenswrapper[4879]: E1125 17:07:32.987513 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10\": container with ID starting with 08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10 not found: ID does not exist" containerID="08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.987541 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10"} err="failed to get container status \"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10\": rpc error: code = NotFound desc = could not find container \"08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10\": container with ID starting with 08e3a6f89c505e0e86b836b7dce393aceaa10110b9e92ec8af7326659eac5d10 not found: ID does not exist" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.987557 4879 scope.go:117] "RemoveContainer" containerID="6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077" Nov 25 17:07:32 crc kubenswrapper[4879]: E1125 17:07:32.987754 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077\": container with ID starting with 6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077 not found: ID does not exist" containerID="6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077" Nov 25 17:07:32 crc kubenswrapper[4879]: I1125 17:07:32.987773 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077"} err="failed to get container status \"6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077\": rpc error: code = NotFound desc = could not find container \"6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077\": container with ID starting with 6d1272632545cb574037200e9a0394e8b4d59a85605b34159d741b27b76e0077 not found: ID does not exist" Nov 25 17:07:33 crc kubenswrapper[4879]: I1125 17:07:33.656061 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" path="/var/lib/kubelet/pods/8ae79c92-2508-4f50-b385-7b742a342fb9/volumes" Nov 25 17:07:47 crc kubenswrapper[4879]: I1125 17:07:47.408578 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:07:47 crc kubenswrapper[4879]: I1125 17:07:47.410406 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:07:47 crc kubenswrapper[4879]: I1125 17:07:47.410523 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 17:07:47 crc kubenswrapper[4879]: I1125 17:07:47.411462 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 17:07:47 crc kubenswrapper[4879]: I1125 17:07:47.411637 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39" gracePeriod=600 Nov 25 17:07:48 crc kubenswrapper[4879]: I1125 17:07:48.033231 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39" exitCode=0 Nov 25 17:07:48 crc kubenswrapper[4879]: I1125 17:07:48.033309 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39"} Nov 25 17:07:48 crc kubenswrapper[4879]: I1125 17:07:48.033498 4879 scope.go:117] "RemoveContainer" containerID="8e9427ea2e3017b3341de6569cbad0882a4bc83ca61f787cdeadf1f30be4e152" Nov 25 17:07:49 crc kubenswrapper[4879]: I1125 17:07:49.045119 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206"} Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.878786 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.881331 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="extract-utilities" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.881418 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="extract-utilities" Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.881493 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="extract-content" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.881550 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" 
containerName="extract-content" Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.881616 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="extract-utilities" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.881681 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="extract-utilities" Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.881759 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="extract-content" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.881814 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="extract-content" Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.881877 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.881929 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: E1125 17:08:03.882004 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.882057 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.883149 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a44299-fdd8-4cdc-9545-9622b57c1196" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.883248 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ae79c92-2508-4f50-b385-7b742a342fb9" containerName="registry-server" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.884907 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:03 crc kubenswrapper[4879]: I1125 17:08:03.894243 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.017837 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.017912 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.018253 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68fd8\" (UniqueName: \"kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.121391 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68fd8\" (UniqueName: \"kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.121654 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.121701 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.122487 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.122556 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.152855 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-68fd8\" (UniqueName: \"kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8\") pod \"community-operators-t978d\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.217084 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:04 crc kubenswrapper[4879]: I1125 17:08:04.818844 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:05 crc kubenswrapper[4879]: I1125 17:08:05.217615 4879 generic.go:334] "Generic (PLEG): container finished" podID="b474d0c2-50dd-488b-912e-ef719e663882" containerID="f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5" exitCode=0 Nov 25 17:08:05 crc kubenswrapper[4879]: I1125 17:08:05.217684 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerDied","Data":"f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5"} Nov 25 17:08:05 crc kubenswrapper[4879]: I1125 17:08:05.219276 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerStarted","Data":"4cbc5ee9c5cf7ca90035960d4e79f796d1a69aaad4b080b451656523f4dc595b"} Nov 25 17:08:06 crc kubenswrapper[4879]: I1125 17:08:06.236832 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerStarted","Data":"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9"} Nov 25 17:08:09 crc kubenswrapper[4879]: I1125 17:08:09.280604 4879 generic.go:334] "Generic (PLEG): container finished" podID="b474d0c2-50dd-488b-912e-ef719e663882" containerID="18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9" exitCode=0 Nov 25 17:08:09 crc kubenswrapper[4879]: I1125 17:08:09.280700 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerDied","Data":"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9"} Nov 25 17:08:10 crc kubenswrapper[4879]: I1125 17:08:10.301808 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerStarted","Data":"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac"} Nov 25 17:08:10 crc kubenswrapper[4879]: I1125 17:08:10.328190 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t978d" podStartSLOduration=2.618786493 podStartE2EDuration="7.3281657s" podCreationTimestamp="2025-11-25 17:08:03 +0000 UTC" firstStartedPulling="2025-11-25 17:08:05.220140982 +0000 UTC m=+9776.823554063" lastFinishedPulling="2025-11-25 17:08:09.929520199 +0000 UTC m=+9781.532933270" observedRunningTime="2025-11-25 17:08:10.323051271 +0000 UTC m=+9781.926464352" watchObservedRunningTime="2025-11-25 17:08:10.3281657 +0000 UTC m=+9781.931578781" Nov 25 17:08:14 crc kubenswrapper[4879]: I1125 17:08:14.218143 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:14 crc kubenswrapper[4879]: I1125 17:08:14.218785 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:14 crc kubenswrapper[4879]: I1125 17:08:14.364631 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:24 crc kubenswrapper[4879]: I1125 17:08:24.270516 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:24 crc kubenswrapper[4879]: I1125 17:08:24.325499 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:24 crc kubenswrapper[4879]: I1125 17:08:24.446253 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t978d" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="registry-server" containerID="cri-o://b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac" gracePeriod=2 Nov 25 17:08:24 crc kubenswrapper[4879]: I1125 17:08:24.946538 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.046972 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content\") pod \"b474d0c2-50dd-488b-912e-ef719e663882\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.047036 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68fd8\" (UniqueName: \"kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8\") pod \"b474d0c2-50dd-488b-912e-ef719e663882\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.047356 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities\") pod \"b474d0c2-50dd-488b-912e-ef719e663882\" (UID: \"b474d0c2-50dd-488b-912e-ef719e663882\") " Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.048163 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities" (OuterVolumeSpecName: "utilities") pod "b474d0c2-50dd-488b-912e-ef719e663882" (UID: "b474d0c2-50dd-488b-912e-ef719e663882"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.099314 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b474d0c2-50dd-488b-912e-ef719e663882" (UID: "b474d0c2-50dd-488b-912e-ef719e663882"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.150172 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.150219 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b474d0c2-50dd-488b-912e-ef719e663882-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.459551 4879 generic.go:334] "Generic (PLEG): container finished" podID="b474d0c2-50dd-488b-912e-ef719e663882" containerID="b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac" exitCode=0 Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.459607 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerDied","Data":"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac"} Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.459637 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t978d" event={"ID":"b474d0c2-50dd-488b-912e-ef719e663882","Type":"ContainerDied","Data":"4cbc5ee9c5cf7ca90035960d4e79f796d1a69aaad4b080b451656523f4dc595b"} Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.459657 4879 scope.go:117] "RemoveContainer" containerID="b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.459797 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t978d" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.482472 4879 scope.go:117] "RemoveContainer" containerID="18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.645690 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8" (OuterVolumeSpecName: "kube-api-access-68fd8") pod "b474d0c2-50dd-488b-912e-ef719e663882" (UID: "b474d0c2-50dd-488b-912e-ef719e663882"). InnerVolumeSpecName "kube-api-access-68fd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.658834 4879 scope.go:117] "RemoveContainer" containerID="f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.667879 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68fd8\" (UniqueName: \"kubernetes.io/projected/b474d0c2-50dd-488b-912e-ef719e663882-kube-api-access-68fd8\") on node \"crc\" DevicePath \"\"" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.726054 4879 scope.go:117] "RemoveContainer" containerID="b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac" Nov 25 17:08:25 crc kubenswrapper[4879]: E1125 17:08:25.726664 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac\": container with ID starting with b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac not found: ID does not exist" containerID="b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.726699 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac"} err="failed to get container status \"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac\": rpc error: code = NotFound desc = could not find container \"b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac\": container with ID starting with b433b9061bf2294f3940294420a88ef80f76615a476d34621f4ed1a6030508ac not found: ID does not exist" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.726720 4879 scope.go:117] "RemoveContainer" containerID="18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9" Nov 25 17:08:25 crc kubenswrapper[4879]: E1125 17:08:25.726991 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9\": container with ID starting with 18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9 not found: ID does not exist" containerID="18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.727017 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9"} err="failed to get container status \"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9\": rpc error: code = NotFound desc = could not find container \"18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9\": container with ID starting with 18e5eec54d156aea9e0e0fa8e75895464184a4363460c7bcc0f0c8dbc5a9cbc9 not found: ID does not exist" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.727032 4879 scope.go:117] "RemoveContainer" containerID="f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5" Nov 25 17:08:25 crc kubenswrapper[4879]: E1125 17:08:25.731402 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5\": container with ID starting with f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5 not found: ID does not 
exist" containerID="f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.731462 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5"} err="failed to get container status \"f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5\": rpc error: code = NotFound desc = could not find container \"f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5\": container with ID starting with f8353ce8a1a9a574df67e51b44dc26536ac4ac36eb096aa5e4ff39d8c007f8e5 not found: ID does not exist" Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.795842 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:25 crc kubenswrapper[4879]: I1125 17:08:25.816085 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t978d"] Nov 25 17:08:27 crc kubenswrapper[4879]: I1125 17:08:27.659729 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b474d0c2-50dd-488b-912e-ef719e663882" path="/var/lib/kubelet/pods/b474d0c2-50dd-488b-912e-ef719e663882/volumes" Nov 25 17:10:17 crc kubenswrapper[4879]: I1125 17:10:17.408795 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:10:17 crc kubenswrapper[4879]: I1125 17:10:17.409368 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:10:47 crc kubenswrapper[4879]: I1125 17:10:47.409860 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:10:47 crc kubenswrapper[4879]: I1125 17:10:47.410618 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.676037 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:06 crc kubenswrapper[4879]: E1125 17:11:06.677515 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="extract-utilities" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.677528 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="extract-utilities" Nov 25 17:11:06 crc kubenswrapper[4879]: E1125 17:11:06.677557 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="extract-content" Nov 25 
17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.677564 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="extract-content" Nov 25 17:11:06 crc kubenswrapper[4879]: E1125 17:11:06.677582 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="registry-server" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.677588 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="registry-server" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.677872 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="b474d0c2-50dd-488b-912e-ef719e663882" containerName="registry-server" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.679622 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.701848 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.800173 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dchsg\" (UniqueName: \"kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.800326 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.800350 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.901961 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dchsg\" (UniqueName: \"kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.902109 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.902150 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " 
pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.902686 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.903197 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:06 crc kubenswrapper[4879]: I1125 17:11:06.925620 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dchsg\" (UniqueName: \"kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg\") pod \"redhat-marketplace-mbbdg\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:07 crc kubenswrapper[4879]: I1125 17:11:07.006080 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:07 crc kubenswrapper[4879]: I1125 17:11:07.569778 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:08 crc kubenswrapper[4879]: I1125 17:11:08.210331 4879 generic.go:334] "Generic (PLEG): container finished" podID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerID="827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0" exitCode=0 Nov 25 17:11:08 crc kubenswrapper[4879]: I1125 17:11:08.210443 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerDied","Data":"827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0"} Nov 25 17:11:08 crc kubenswrapper[4879]: I1125 17:11:08.210713 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerStarted","Data":"fbd7a94496555fdfc00062e785866389ad921b079734f317ae775f09448adfcf"} Nov 25 17:11:08 crc kubenswrapper[4879]: I1125 17:11:08.213778 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 17:11:09 crc kubenswrapper[4879]: I1125 17:11:09.227564 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerStarted","Data":"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081"} Nov 25 17:11:10 crc kubenswrapper[4879]: I1125 17:11:10.243639 4879 generic.go:334] "Generic (PLEG): container finished" podID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerID="112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081" exitCode=0 Nov 25 17:11:10 crc kubenswrapper[4879]: I1125 17:11:10.243733 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerDied","Data":"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081"} Nov 25 
17:11:11 crc kubenswrapper[4879]: I1125 17:11:11.258667 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerStarted","Data":"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b"} Nov 25 17:11:11 crc kubenswrapper[4879]: I1125 17:11:11.287092 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mbbdg" podStartSLOduration=2.745992198 podStartE2EDuration="5.287065669s" podCreationTimestamp="2025-11-25 17:11:06 +0000 UTC" firstStartedPulling="2025-11-25 17:11:08.213549532 +0000 UTC m=+9959.816962603" lastFinishedPulling="2025-11-25 17:11:10.754623003 +0000 UTC m=+9962.358036074" observedRunningTime="2025-11-25 17:11:11.279865835 +0000 UTC m=+9962.883278906" watchObservedRunningTime="2025-11-25 17:11:11.287065669 +0000 UTC m=+9962.890478740" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.007051 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.009400 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.062885 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.409063 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.409141 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.409190 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.410057 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.410135 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" gracePeriod=600 Nov 25 17:11:17 crc kubenswrapper[4879]: E1125 17:11:17.530575 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.729804 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" exitCode=0 Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.729831 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206"} Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.730639 4879 scope.go:117] "RemoveContainer" containerID="743bd168195d960d26df5ccd667c43d47dd9ab1fd42cf6572baf38f0ee082d39" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.731483 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:11:17 crc kubenswrapper[4879]: E1125 17:11:17.731948 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.814090 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:17 crc kubenswrapper[4879]: I1125 17:11:17.866474 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:19 crc kubenswrapper[4879]: I1125 17:11:19.755553 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mbbdg" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="registry-server" containerID="cri-o://2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b" gracePeriod=2 Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.294133 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.399536 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content\") pod \"3e30be64-aa52-4604-960f-e1fb28c6a87e\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.399685 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities\") pod \"3e30be64-aa52-4604-960f-e1fb28c6a87e\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.399710 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dchsg\" (UniqueName: \"kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg\") pod \"3e30be64-aa52-4604-960f-e1fb28c6a87e\" (UID: \"3e30be64-aa52-4604-960f-e1fb28c6a87e\") " Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.400837 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities" (OuterVolumeSpecName: "utilities") pod "3e30be64-aa52-4604-960f-e1fb28c6a87e" (UID: "3e30be64-aa52-4604-960f-e1fb28c6a87e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.405006 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg" (OuterVolumeSpecName: "kube-api-access-dchsg") pod "3e30be64-aa52-4604-960f-e1fb28c6a87e" (UID: "3e30be64-aa52-4604-960f-e1fb28c6a87e"). InnerVolumeSpecName "kube-api-access-dchsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.424055 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e30be64-aa52-4604-960f-e1fb28c6a87e" (UID: "3e30be64-aa52-4604-960f-e1fb28c6a87e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.501930 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.501960 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e30be64-aa52-4604-960f-e1fb28c6a87e-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.501971 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dchsg\" (UniqueName: \"kubernetes.io/projected/3e30be64-aa52-4604-960f-e1fb28c6a87e-kube-api-access-dchsg\") on node \"crc\" DevicePath \"\"" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.777363 4879 generic.go:334] "Generic (PLEG): container finished" podID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerID="2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b" exitCode=0 Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.777436 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerDied","Data":"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b"} Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.777506 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mbbdg" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.777530 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mbbdg" event={"ID":"3e30be64-aa52-4604-960f-e1fb28c6a87e","Type":"ContainerDied","Data":"fbd7a94496555fdfc00062e785866389ad921b079734f317ae775f09448adfcf"} Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.777583 4879 scope.go:117] "RemoveContainer" containerID="2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.837367 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.837622 4879 scope.go:117] "RemoveContainer" containerID="112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.851777 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mbbdg"] Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.868411 4879 scope.go:117] "RemoveContainer" containerID="827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.920070 4879 scope.go:117] "RemoveContainer" containerID="2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b" Nov 25 17:11:20 crc kubenswrapper[4879]: E1125 17:11:20.920696 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b\": container with ID starting with 2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b not found: ID does not exist" containerID="2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.920749 4879 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b"} err="failed to get container status \"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b\": rpc error: code = NotFound desc = could not find container \"2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b\": container with ID starting with 2fc8bb1fc14a1c75ad9c3bb8e4eb42c241a09bed8d8f2e9b39d46b3061384d6b not found: ID does not exist" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.920780 4879 scope.go:117] "RemoveContainer" containerID="112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081" Nov 25 17:11:20 crc kubenswrapper[4879]: E1125 17:11:20.921247 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081\": container with ID starting with 112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081 not found: ID does not exist" containerID="112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.921290 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081"} err="failed to get container status \"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081\": rpc error: code = NotFound desc = could not find container \"112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081\": container with ID starting with 112a909e1f25f4e76baca314f4efb28fa7b98ba04c7935efd80fb30f5761e081 not found: ID does not exist" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.921317 4879 scope.go:117] "RemoveContainer" containerID="827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0" Nov 25 17:11:20 crc kubenswrapper[4879]: E1125 17:11:20.921592 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0\": container with ID starting with 827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0 not found: ID does not exist" containerID="827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0" Nov 25 17:11:20 crc kubenswrapper[4879]: I1125 17:11:20.921612 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0"} err="failed to get container status \"827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0\": rpc error: code = NotFound desc = could not find container \"827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0\": container with ID starting with 827621a0dc07d9b3ab8a6c87cc73bc3afb2cb61aa3fadfb4b776f3670c2ebca0 not found: ID does not exist" Nov 25 17:11:21 crc kubenswrapper[4879]: I1125 17:11:21.673817 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" path="/var/lib/kubelet/pods/3e30be64-aa52-4604-960f-e1fb28c6a87e/volumes" Nov 25 17:11:32 crc kubenswrapper[4879]: I1125 17:11:32.644908 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:11:32 crc kubenswrapper[4879]: E1125 17:11:32.645707 4879 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:11:44 crc kubenswrapper[4879]: I1125 17:11:44.645566 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:11:44 crc kubenswrapper[4879]: E1125 17:11:44.646385 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:11:58 crc kubenswrapper[4879]: I1125 17:11:58.645053 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:11:58 crc kubenswrapper[4879]: E1125 17:11:58.645859 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:12:09 crc kubenswrapper[4879]: I1125 17:12:09.654876 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:12:09 crc kubenswrapper[4879]: E1125 17:12:09.655736 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:12:24 crc kubenswrapper[4879]: I1125 17:12:24.645225 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:12:24 crc kubenswrapper[4879]: E1125 17:12:24.646023 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:12:37 crc kubenswrapper[4879]: I1125 17:12:37.646370 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:12:37 crc kubenswrapper[4879]: E1125 17:12:37.647995 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:12:52 crc kubenswrapper[4879]: I1125 17:12:52.645173 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:12:52 crc kubenswrapper[4879]: E1125 17:12:52.645917 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:13:04 crc kubenswrapper[4879]: I1125 17:13:04.645040 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:13:04 crc kubenswrapper[4879]: E1125 17:13:04.645949 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:13:19 crc kubenswrapper[4879]: I1125 17:13:19.657005 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:13:19 crc kubenswrapper[4879]: E1125 17:13:19.658693 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:13:34 crc kubenswrapper[4879]: I1125 17:13:34.644504 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:13:34 crc kubenswrapper[4879]: E1125 17:13:34.646055 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:13:46 crc kubenswrapper[4879]: I1125 17:13:46.644630 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:13:46 crc kubenswrapper[4879]: E1125 17:13:46.645549 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:13:57 crc kubenswrapper[4879]: I1125 17:13:57.644652 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:13:57 crc kubenswrapper[4879]: E1125 17:13:57.645311 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:09 crc kubenswrapper[4879]: I1125 17:14:09.654791 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:14:09 crc kubenswrapper[4879]: E1125 17:14:09.655834 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.500438 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_facb862c-2078-49c9-ab83-5cab4bcaee93/init-config-reloader/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.668727 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_facb862c-2078-49c9-ab83-5cab4bcaee93/init-config-reloader/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.682080 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_facb862c-2078-49c9-ab83-5cab4bcaee93/alertmanager/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.718389 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_facb862c-2078-49c9-ab83-5cab4bcaee93/config-reloader/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.858921 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_a56d3003-901e-46ce-99d5-86ea06b4915a/aodh-api/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.871571 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_a56d3003-901e-46ce-99d5-86ea06b4915a/aodh-evaluator/0.log" Nov 25 17:14:18 crc kubenswrapper[4879]: I1125 17:14:18.921074 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_a56d3003-901e-46ce-99d5-86ea06b4915a/aodh-listener/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.057115 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_a56d3003-901e-46ce-99d5-86ea06b4915a/aodh-notifier/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.072882 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-584c9766db-7l7kz_12429423-60c1-42f9-bb72-bdb45cdddd45/barbican-api/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.138774 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-api-584c9766db-7l7kz_12429423-60c1-42f9-bb72-bdb45cdddd45/barbican-api-log/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.267657 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c74ff6974-9qdrm_331f36f8-4a95-4b48-a224-7366506b0b1f/barbican-keystone-listener/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.341850 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c74ff6974-9qdrm_331f36f8-4a95-4b48-a224-7366506b0b1f/barbican-keystone-listener-log/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.463422 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b85ff7f6c-kfq49_de155fb1-303b-4959-a523-07f6f63f38f0/barbican-worker/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.518758 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b85ff7f6c-kfq49_de155fb1-303b-4959-a523-07f6f63f38f0/barbican-worker-log/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.603333 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-fcw6q_524593c5-f8a9-41e4-99b0-2ef4f69b37b3/bootstrap-openstack-openstack-cell1/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.724365 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f132884-e184-47e7-8a5e-61299b10f83b/ceilometer-central-agent/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.842058 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f132884-e184-47e7-8a5e-61299b10f83b/ceilometer-notification-agent/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.846274 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f132884-e184-47e7-8a5e-61299b10f83b/proxy-httpd/0.log" Nov 25 17:14:19 crc kubenswrapper[4879]: I1125 17:14:19.909546 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3f132884-e184-47e7-8a5e-61299b10f83b/sg-core/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.046783 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-dwnhv_a7410331-ac6a-440c-ae28-b480ff1cee46/ceph-client-openstack-openstack-cell1/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.141878 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e3a59410-e11a-420f-a12a-ffc6e4e70da5/cinder-api/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.455905 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_e3a59410-e11a-420f-a12a-ffc6e4e70da5/cinder-api-log/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.701391 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_94d9e6dc-e680-4e3c-a4d9-29e638f2e47e/probe/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.745767 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_94d9e6dc-e680-4e3c-a4d9-29e638f2e47e/cinder-backup/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: I1125 17:14:20.800228 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_58462743-d61e-44b0-bdfb-675330d8b5ad/cinder-scheduler/0.log" Nov 25 17:14:20 crc kubenswrapper[4879]: 
I1125 17:14:20.952850 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_58462743-d61e-44b0-bdfb-675330d8b5ad/probe/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.097792 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_3fbeb883-d1df-4c65-8125-cdeb73794af3/cinder-volume/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.099913 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_3fbeb883-d1df-4c65-8125-cdeb73794af3/probe/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.249443 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-fxbrk_89862778-c037-4fb8-8424-dfc99af4f6a4/configure-network-openstack-openstack-cell1/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.340282 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-v42kw_e6f04bfd-aca5-4e3b-b77e-301b43048202/configure-os-openstack-openstack-cell1/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.470353 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c8c87dd7f-pftfx_1e038d40-0bea-40b2-ad72-c77384d9a39e/init/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.707394 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c8c87dd7f-pftfx_1e038d40-0bea-40b2-ad72-c77384d9a39e/init/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.757861 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-c8c87dd7f-pftfx_1e038d40-0bea-40b2-ad72-c77384d9a39e/dnsmasq-dns/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.800366 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-b8l44_455bbc30-41d7-431c-b0ff-dcda077069cc/download-cache-openstack-openstack-cell1/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.964766 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_202b8ca0-3167-46f7-98b7-643afff13caf/glance-log/0.log" Nov 25 17:14:21 crc kubenswrapper[4879]: I1125 17:14:21.983417 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_202b8ca0-3167-46f7-98b7-643afff13caf/glance-httpd/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.175482 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cc5c85-1149-4388-8606-859f5c8c2a14/glance-httpd/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.196241 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_c6cc5c85-1149-4388-8606-859f5c8c2a14/glance-log/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.254379 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-67959fbf64-b2h9v_b137cdf4-7328-4e8b-b8b7-015e8094122c/heat-api/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.621457 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6c4b655ff8-7sj9f_574d8787-8b21-448d-b228-fbd1b54f30be/heat-cfnapi/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.638543 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_heat-engine-fcb9f8794-rb2q9_a2adfe45-39e8-43bd-8987-5109e1f80200/heat-engine/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.818711 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfc55cbfc-rlmwz_1368a223-0bf5-4797-b790-993da5053700/horizon/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.963579 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-dfc55cbfc-rlmwz_1368a223-0bf5-4797-b790-993da5053700/horizon-log/0.log" Nov 25 17:14:22 crc kubenswrapper[4879]: I1125 17:14:22.992463 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-tp6gv_c776aec9-eb9c-4b2f-b53c-0f0807213067/install-certs-openstack-openstack-cell1/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.084704 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-xxrt8_fde903cf-f64d-49b5-a9a6-fd0a054d979c/install-os-openstack-openstack-cell1/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.292253 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29401441-zjmkd_1c9d5fb6-90b7-44ee-a019-df27b5c22eef/keystone-cron/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.347595 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-c5d6457d8-szl6w_8937ae09-0ddc-4409-9419-dccc438b7ccb/keystone-api/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.455919 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7e4afeb0-b288-4960-905b-2cac22f0df5c/kube-state-metrics/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.529481 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29401501-fgvcs_eea642a0-9caf-483e-865b-c61b1300d15d/keystone-cron/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.660780 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-w86vc_6d4df058-78cf-4287-997d-36533614641c/libvirt-openstack-openstack-cell1/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.755600 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_95f83580-e814-453a-9123-0490abae84f2/manila-api-log/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.863065 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_95f83580-e814-453a-9123-0490abae84f2/manila-api/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.941388 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc/manila-scheduler/0.log" Nov 25 17:14:23 crc kubenswrapper[4879]: I1125 17:14:23.958845 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_b2d6a2ab-91bc-45c1-a3dc-b34c0f2c73bc/probe/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.074798 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_9b5edbf0-5410-4506-93b1-472e241e68c0/manila-share/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.135340 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_7db40489-095f-41fb-80d9-23d910f266bf/adoption/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.140152 4879 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_9b5edbf0-5410-4506-93b1-472e241e68c0/probe/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.460578 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d677fbdcc-5vh2w_871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a/neutron-api/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.492466 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5d677fbdcc-5vh2w_871a36ed-c8f0-4ad0-b8b7-0bb44f467c7a/neutron-httpd/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.645674 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:14:24 crc kubenswrapper[4879]: E1125 17:14:24.645977 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.740078 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-rhwf5_774dd8c2-a2e8-4639-82ab-122fa41f5dbb/neutron-dhcp-openstack-openstack-cell1/0.log" Nov 25 17:14:24 crc kubenswrapper[4879]: I1125 17:14:24.798878 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-sv8vm_eec6cf16-d1ed-4bba-a189-d4ce39dda66c/neutron-metadata-openstack-openstack-cell1/0.log" Nov 25 17:14:25 crc kubenswrapper[4879]: I1125 17:14:25.041213 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-gn4dc_faa2bad0-2473-4bea-b07b-1b95f0d02413/neutron-sriov-openstack-openstack-cell1/0.log" Nov 25 17:14:25 crc kubenswrapper[4879]: I1125 17:14:25.159526 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e63045ef-684f-4067-bf59-b762d1890157/nova-api-api/0.log" Nov 25 17:14:25 crc kubenswrapper[4879]: I1125 17:14:25.194268 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_e63045ef-684f-4067-bf59-b762d1890157/nova-api-log/0.log" Nov 25 17:14:25 crc kubenswrapper[4879]: I1125 17:14:25.361854 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c68eddd9-47a6-45ca-9873-636e7785a07d/nova-cell0-conductor-conductor/0.log" Nov 25 17:14:25 crc kubenswrapper[4879]: I1125 17:14:25.542150 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c9c32c35-2748-44d3-9810-98ffc9c6011b/nova-cell1-conductor-conductor/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.342578 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_4d5221af-ab36-4a01-8c73-219f8bb76568/nova-cell1-novncproxy-novncproxy/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.362301 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellkmj29_6d1a0044-8107-4346-a960-be9ee93cc90c/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.613754 4879 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-sng5x_f09196b3-7c17-4117-9733-77a97644d23c/nova-cell1-openstack-openstack-cell1/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.717021 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b517bafe-e7fe-41b3-bf10-a1b2dfee55c2/nova-metadata-metadata/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.774484 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b517bafe-e7fe-41b3-bf10-a1b2dfee55c2/nova-metadata-log/0.log" Nov 25 17:14:26 crc kubenswrapper[4879]: I1125 17:14:26.961054 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_cb351db7-19bf-49ee-81e8-c660932014f9/nova-scheduler-scheduler/0.log" Nov 25 17:14:27 crc kubenswrapper[4879]: I1125 17:14:27.089106 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6df846d798-cksqh_7e906050-a1ea-4637-9c1d-5237d58fad24/init/0.log" Nov 25 17:14:27 crc kubenswrapper[4879]: I1125 17:14:27.324861 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6df846d798-cksqh_7e906050-a1ea-4637-9c1d-5237d58fad24/octavia-api-provider-agent/0.log" Nov 25 17:14:27 crc kubenswrapper[4879]: I1125 17:14:27.329161 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6df846d798-cksqh_7e906050-a1ea-4637-9c1d-5237d58fad24/init/0.log" Nov 25 17:14:27 crc kubenswrapper[4879]: I1125 17:14:27.692318 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-6df846d798-cksqh_7e906050-a1ea-4637-9c1d-5237d58fad24/octavia-api/0.log" Nov 25 17:14:27 crc kubenswrapper[4879]: I1125 17:14:27.969484 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-pcrl2_7f8f40c8-3493-4c88-be75-78de60f1d094/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.235889 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-pcrl2_7f8f40c8-3493-4c88-be75-78de60f1d094/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.339791 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qqlr2_6fe2c7fb-209d-4890-8cdf-613c5b44fc47/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.384283 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-pcrl2_7f8f40c8-3493-4c88-be75-78de60f1d094/octavia-healthmanager/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.459453 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qqlr2_6fe2c7fb-209d-4890-8cdf-613c5b44fc47/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.589082 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-rq4tk_fa7912a7-1d8d-437c-9611-2e2474e48f80/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.620049 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-qqlr2_6fe2c7fb-209d-4890-8cdf-613c5b44fc47/octavia-housekeeping/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.907222 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-6tbg6_85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.915886 4879 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_octavia-rsyslog-rq4tk_fa7912a7-1d8d-437c-9611-2e2474e48f80/init/0.log" Nov 25 17:14:28 crc kubenswrapper[4879]: I1125 17:14:28.934516 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-rq4tk_fa7912a7-1d8d-437c-9611-2e2474e48f80/octavia-rsyslog/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.097342 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-6tbg6_85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9/init/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.254218 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_dce69d75-a372-4ec2-87d3-c84104c18dc6/mysql-bootstrap/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.291503 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-6tbg6_85ca893f-d4cb-4ee3-80d6-6d6b5b6804d9/octavia-worker/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.572768 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6d6cce1f-4a79-40b0-b252-83b49b6a4770/mysql-bootstrap/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.574470 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_dce69d75-a372-4ec2-87d3-c84104c18dc6/galera/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.616090 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_dce69d75-a372-4ec2-87d3-c84104c18dc6/mysql-bootstrap/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.812590 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6d6cce1f-4a79-40b0-b252-83b49b6a4770/mysql-bootstrap/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.871384 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_08153598-9efb-4dd7-8189-7c1cfbd76505/openstackclient/0.log" Nov 25 17:14:29 crc kubenswrapper[4879]: I1125 17:14:29.903603 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6d6cce1f-4a79-40b0-b252-83b49b6a4770/galera/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.100420 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zxj5g_fdcb65bc-485f-4f17-9167-a6407d985f44/openstack-network-exporter/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.197896 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nvsvv_c7787cd4-dbb1-4a83-b79b-017ba868ad0f/ovsdb-server-init/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.344621 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nvsvv_c7787cd4-dbb1-4a83-b79b-017ba868ad0f/ovsdb-server-init/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.385917 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nvsvv_c7787cd4-dbb1-4a83-b79b-017ba868ad0f/ovsdb-server/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.414038 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-nvsvv_c7787cd4-dbb1-4a83-b79b-017ba868ad0f/ovs-vswitchd/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.561364 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-r78t7_33ca0631-dd7b-4592-a04c-ea75322c7323/ovn-controller/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.626373 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_6d94a7fd-3168-40a2-94ec-3b34536b1637/adoption/0.log" Nov 25 17:14:30 crc kubenswrapper[4879]: I1125 17:14:30.753087 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_866649f8-e50c-40d5-8b63-f419f521f9a6/openstack-network-exporter/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.028557 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_866649f8-e50c-40d5-8b63-f419f521f9a6/ovn-northd/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.163149 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-d74xc_6e8a543e-eaec-4564-82e0-39184cf58bab/ovn-openstack-openstack-cell1/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.264516 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_46bad026-7ba9-46a3-91fa-96d023831aa5/openstack-network-exporter/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.377828 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_46bad026-7ba9-46a3-91fa-96d023831aa5/ovsdbserver-nb/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.483929 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_424ac0b4-4812-4bbd-b151-5b00ed6b6b0d/openstack-network-exporter/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.487281 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_424ac0b4-4812-4bbd-b151-5b00ed6b6b0d/ovsdbserver-nb/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.686249 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_80c08ccb-e227-4456-aa16-e391ea9f7a1b/openstack-network-exporter/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.699429 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_80c08ccb-e227-4456-aa16-e391ea9f7a1b/ovsdbserver-nb/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.932050 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_88d08e32-ebc8-4bea-90f5-13da2037b453/ovsdbserver-sb/0.log" Nov 25 17:14:31 crc kubenswrapper[4879]: I1125 17:14:31.945857 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_88d08e32-ebc8-4bea-90f5-13da2037b453/openstack-network-exporter/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.092721 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_8d75a7ac-1bb1-4483-8ff3-6087d704e2c6/openstack-network-exporter/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.115285 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_8d75a7ac-1bb1-4483-8ff3-6087d704e2c6/ovsdbserver-sb/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.172524 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_dfe5dcf4-c62b-4d71-8459-f873013d4bc0/openstack-network-exporter/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.387745 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-2_dfe5dcf4-c62b-4d71-8459-f873013d4bc0/ovsdbserver-sb/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.485247 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-9cd976bcb-hcsm7_0a69eda1-9308-4df4-8135-27e97e2e834a/placement-api/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.539442 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-9cd976bcb-hcsm7_0a69eda1-9308-4df4-8135-27e97e2e834a/placement-log/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.652617 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cs4rcg_a31b4be8-7620-4231-a877-f2755303c565/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Nov 25 17:14:32 crc kubenswrapper[4879]: I1125 17:14:32.819494 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_3c36d2d1-d726-43e1-b145-42f753a0ef41/init-config-reloader/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.033833 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_3c36d2d1-d726-43e1-b145-42f753a0ef41/prometheus/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.065535 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_3c36d2d1-d726-43e1-b145-42f753a0ef41/init-config-reloader/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.080110 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_3c36d2d1-d726-43e1-b145-42f753a0ef41/config-reloader/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.088241 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_3c36d2d1-d726-43e1-b145-42f753a0ef41/thanos-sidecar/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.280435 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a503b64c-abfe-4b3b-a501-ef9b4203e56d/setup-container/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.519552 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a503b64c-abfe-4b3b-a501-ef9b4203e56d/setup-container/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.533828 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a503b64c-abfe-4b3b-a501-ef9b4203e56d/rabbitmq/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.579789 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a756a228-0a62-4e5f-a4fe-f728972087c7/setup-container/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.739387 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a756a228-0a62-4e5f-a4fe-f728972087c7/setup-container/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.781510 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a756a228-0a62-4e5f-a4fe-f728972087c7/rabbitmq/0.log" Nov 25 17:14:33 crc kubenswrapper[4879]: I1125 17:14:33.818547 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-lxvxv_3ad63e1e-3807-4ca6-8b47-e36e487f88f4/reboot-os-openstack-openstack-cell1/0.log" Nov 25 17:14:34 
crc kubenswrapper[4879]: I1125 17:14:34.055907 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-wcnjd_2db6d808-8fb9-4a9c-a582-d56baec0b5e4/run-os-openstack-openstack-cell1/0.log" Nov 25 17:14:34 crc kubenswrapper[4879]: I1125 17:14:34.107566 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-cnqj8_e4aa46b6-8d47-4b77-b5f9-58a3c7006429/ssh-known-hosts-openstack/0.log" Nov 25 17:14:34 crc kubenswrapper[4879]: I1125 17:14:34.304897 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-56m2z_aa6e8e77-8d1f-4ad3-9c02-52aa840472b0/telemetry-openstack-openstack-cell1/0.log" Nov 25 17:14:34 crc kubenswrapper[4879]: I1125 17:14:34.503739 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-6bqdf_2101fdfc-c211-4108-aaab-562995e85279/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Nov 25 17:14:34 crc kubenswrapper[4879]: I1125 17:14:34.570008 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-6b78g_7dee0d4d-6402-46fe-95d5-dc69efb5feb3/validate-network-openstack-openstack-cell1/0.log" Nov 25 17:14:35 crc kubenswrapper[4879]: I1125 17:14:35.170781 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_9e5810bb-73c5-4c0c-9fd7-07556c6a600a/memcached/0.log" Nov 25 17:14:35 crc kubenswrapper[4879]: I1125 17:14:35.644559 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:14:35 crc kubenswrapper[4879]: E1125 17:14:35.644847 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:47 crc kubenswrapper[4879]: I1125 17:14:47.649698 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:14:47 crc kubenswrapper[4879]: E1125 17:14:47.650629 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:55 crc kubenswrapper[4879]: I1125 17:14:55.879976 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-86dc4d89c8-vf4mk_a8b8b0f7-f988-46b1-b88f-751261b1c6a1/kube-rbac-proxy/0.log" Nov 25 17:14:55 crc kubenswrapper[4879]: I1125 17:14:55.976993 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-86dc4d89c8-vf4mk_a8b8b0f7-f988-46b1-b88f-751261b1c6a1/manager/3.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.015742 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-86dc4d89c8-vf4mk_a8b8b0f7-f988-46b1-b88f-751261b1c6a1/manager/2.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.081494 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79856dc55c-sl6x7_461b714a-4ee7-40ab-99d3-cd78552b52c6/kube-rbac-proxy/0.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.757211 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79856dc55c-sl6x7_461b714a-4ee7-40ab-99d3-cd78552b52c6/manager/2.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.768594 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d695c9b56-4z42w_230849f3-daef-4f23-9839-8f0bd76d8e4a/kube-rbac-proxy/0.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.865457 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79856dc55c-sl6x7_461b714a-4ee7-40ab-99d3-cd78552b52c6/manager/3.log" Nov 25 17:14:56 crc kubenswrapper[4879]: I1125 17:14:56.956806 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d695c9b56-4z42w_230849f3-daef-4f23-9839-8f0bd76d8e4a/manager/2.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.045073 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d695c9b56-4z42w_230849f3-daef-4f23-9839-8f0bd76d8e4a/manager/3.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.099562 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/util/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.202096 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/util/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.228153 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/pull/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.237679 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/pull/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.440451 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/util/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.441799 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/pull/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.489856 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e9b406d0c1d2aea76df313e9f99efd7b723e69ce1f5778051c6023a4e6t8rml_ecdbbc60-230b-4da2-a5bb-0623c9d6e31f/extract/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 
17:14:57.643622 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-68b95954c9-gb5r4_e9e99ec6-68ec-4d48-847b-b5f350dc1fc4/kube-rbac-proxy/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.725823 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-68b95954c9-gb5r4_e9e99ec6-68ec-4d48-847b-b5f350dc1fc4/manager/3.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.729483 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-68b95954c9-gb5r4_e9e99ec6-68ec-4d48-847b-b5f350dc1fc4/manager/2.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.868384 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-774b86978c-w8cc5_0c0a5e22-8150-48b6-9b4f-a9b18bb4960f/kube-rbac-proxy/0.log" Nov 25 17:14:57 crc kubenswrapper[4879]: I1125 17:14:57.897059 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-774b86978c-w8cc5_0c0a5e22-8150-48b6-9b4f-a9b18bb4960f/manager/3.log" Nov 25 17:14:58 crc kubenswrapper[4879]: I1125 17:14:58.739768 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-774b86978c-w8cc5_0c0a5e22-8150-48b6-9b4f-a9b18bb4960f/manager/2.log" Nov 25 17:14:58 crc kubenswrapper[4879]: I1125 17:14:58.806911 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c9694994-ltnmm_cefe5024-a03a-427e-84a5-a4f6eac64f12/kube-rbac-proxy/0.log" Nov 25 17:14:58 crc kubenswrapper[4879]: I1125 17:14:58.845662 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c9694994-ltnmm_cefe5024-a03a-427e-84a5-a4f6eac64f12/manager/2.log" Nov 25 17:14:58 crc kubenswrapper[4879]: I1125 17:14:58.934702 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c9694994-ltnmm_cefe5024-a03a-427e-84a5-a4f6eac64f12/manager/1.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.005643 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-d5cc86f4b-8tdb9_afbf9f55-3316-40bb-b53b-d4d96482f9d5/kube-rbac-proxy/0.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.220656 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-d5cc86f4b-8tdb9_afbf9f55-3316-40bb-b53b-d4d96482f9d5/manager/1.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.294539 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-d5cc86f4b-8tdb9_afbf9f55-3316-40bb-b53b-d4d96482f9d5/manager/2.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.323240 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5bfcdc958c-8tgzv_be0d238d-5b08-42e1-ac21-4e00592ab433/kube-rbac-proxy/0.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.435160 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5bfcdc958c-8tgzv_be0d238d-5b08-42e1-ac21-4e00592ab433/manager/3.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 
17:14:59.464885 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5bfcdc958c-8tgzv_be0d238d-5b08-42e1-ac21-4e00592ab433/manager/2.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.543618 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-748dc6576f-5rjs9_89a61837-ab76-494d-a98d-268fed9bbe35/kube-rbac-proxy/0.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.576043 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-748dc6576f-5rjs9_89a61837-ab76-494d-a98d-268fed9bbe35/manager/2.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.645735 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-748dc6576f-5rjs9_89a61837-ab76-494d-a98d-268fed9bbe35/manager/1.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.658256 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:14:59 crc kubenswrapper[4879]: E1125 17:14:59.658639 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.743709 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58bb8d67cc-m7d57_bac13c1a-af96-4cb0-a802-ef2086f9f06b/kube-rbac-proxy/0.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.791199 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58bb8d67cc-m7d57_bac13c1a-af96-4cb0-a802-ef2086f9f06b/manager/3.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.816706 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58bb8d67cc-m7d57_bac13c1a-af96-4cb0-a802-ef2086f9f06b/manager/2.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.886372 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-cb6c4fdb7-95pqd_279425db-228b-4697-864f-e50d2eb66012/kube-rbac-proxy/0.log" Nov 25 17:14:59 crc kubenswrapper[4879]: I1125 17:14:59.936489 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-cb6c4fdb7-95pqd_279425db-228b-4697-864f-e50d2eb66012/manager/3.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.003801 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-cb6c4fdb7-95pqd_279425db-228b-4697-864f-e50d2eb66012/manager/2.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.063092 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7c57c8bbc4-7mc5t_8f91f389-91ad-4a56-9e71-5cf7bb88db01/kube-rbac-proxy/0.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.154579 4879 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp"] Nov 25 17:15:00 crc kubenswrapper[4879]: E1125 17:15:00.155558 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="extract-content" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.155595 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="extract-content" Nov 25 17:15:00 crc kubenswrapper[4879]: E1125 17:15:00.155620 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="extract-utilities" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.155630 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="extract-utilities" Nov 25 17:15:00 crc kubenswrapper[4879]: E1125 17:15:00.155696 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="registry-server" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.155704 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="registry-server" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.155747 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7c57c8bbc4-7mc5t_8f91f389-91ad-4a56-9e71-5cf7bb88db01/manager/2.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.156059 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e30be64-aa52-4604-960f-e1fb28c6a87e" containerName="registry-server" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.157495 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.163070 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7c57c8bbc4-7mc5t_8f91f389-91ad-4a56-9e71-5cf7bb88db01/manager/3.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.163288 4879 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.163442 4879 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.172526 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp"] Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.209072 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vdp9\" (UniqueName: \"kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.209114 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.209234 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.310856 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vdp9\" (UniqueName: \"kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.310912 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.310982 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 
25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.311998 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.316945 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.333764 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-mq9fj_c5c5776f-3970-425a-b5a7-c4c859f821e0/kube-rbac-proxy/0.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.336861 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vdp9\" (UniqueName: \"kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9\") pod \"collect-profiles-29401515-r7kmp\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.439078 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-mq9fj_c5c5776f-3970-425a-b5a7-c4c859f821e0/manager/1.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.488954 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-mq9fj_c5c5776f-3970-425a-b5a7-c4c859f821e0/manager/2.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.494740 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.564648 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-fd75fd47d-txbsq_5c199b9d-786f-4520-a7bb-67f616b16b88/kube-rbac-proxy/0.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.689531 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-fd75fd47d-txbsq_5c199b9d-786f-4520-a7bb-67f616b16b88/manager/1.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.748586 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf_2e38fffb-d3ee-488d-bbc7-811d4ba43797/kube-rbac-proxy/0.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.749689 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-fd75fd47d-txbsq_5c199b9d-786f-4520-a7bb-67f616b16b88/manager/2.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.807232 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf_2e38fffb-d3ee-488d-bbc7-811d4ba43797/manager/1.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.925015 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-544b9bb9-hxwnf_2e38fffb-d3ee-488d-bbc7-811d4ba43797/manager/0.log" Nov 25 17:15:00 crc kubenswrapper[4879]: I1125 17:15:00.971201 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-77bf44fb75-xdhlf_b7d6b37e-0aff-4496-b240-7770d1d23827/manager/1.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.002256 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp"] Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.255889 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" event={"ID":"aad67702-0db0-4506-a450-532a2adeff53","Type":"ContainerStarted","Data":"01ad19b7cc59e411dfec6255eff6023b6b90305e4848c92f2491b11b8873b37b"} Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.256221 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" event={"ID":"aad67702-0db0-4506-a450-532a2adeff53","Type":"ContainerStarted","Data":"0eb62b6558e2c2f280bd81f707f306446841eb50de9b21831ff0ea2eb5ad2e1b"} Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.276550 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" podStartSLOduration=1.27652827 podStartE2EDuration="1.27652827s" podCreationTimestamp="2025-11-25 17:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 17:15:01.272502273 +0000 UTC m=+10192.875915344" watchObservedRunningTime="2025-11-25 17:15:01.27652827 +0000 UTC m=+10192.879941341" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.303332 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5fd4b8b4b5-64rqt_5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24/operator/1.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.329521 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-7nr95_df6e65ac-24ef-413b-87e9-09f1a13e9d60/registry-server/0.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.506407 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5fd4b8b4b5-64rqt_5a7c0ce6-7da5-4f42-bd44-4bcdf67b9b24/operator/0.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.593222 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-66cf5c67ff-pr6k5_94c3a712-5baa-4789-ad81-8d4c0554d84b/kube-rbac-proxy/0.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.678974 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-66cf5c67ff-pr6k5_94c3a712-5baa-4789-ad81-8d4c0554d84b/manager/2.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.748687 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-66cf5c67ff-pr6k5_94c3a712-5baa-4789-ad81-8d4c0554d84b/manager/1.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.829402 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5db546f9d9-nk7tw_4ca6d024-306e-4707-abb0-1b57ed1e11b6/kube-rbac-proxy/0.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.977327 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5db546f9d9-nk7tw_4ca6d024-306e-4707-abb0-1b57ed1e11b6/manager/1.log" Nov 25 17:15:01 crc kubenswrapper[4879]: I1125 17:15:01.988936 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5db546f9d9-nk7tw_4ca6d024-306e-4707-abb0-1b57ed1e11b6/manager/2.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.059772 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zj2cr_ee61acb4-f03b-4e5c-996c-3b4436b8e676/operator/3.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.262187 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zj2cr_ee61acb4-f03b-4e5c-996c-3b4436b8e676/operator/2.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.286574 4879 generic.go:334] "Generic (PLEG): container finished" podID="aad67702-0db0-4506-a450-532a2adeff53" containerID="01ad19b7cc59e411dfec6255eff6023b6b90305e4848c92f2491b11b8873b37b" exitCode=0 Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.286618 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" event={"ID":"aad67702-0db0-4506-a450-532a2adeff53","Type":"ContainerDied","Data":"01ad19b7cc59e411dfec6255eff6023b6b90305e4848c92f2491b11b8873b37b"} Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.289710 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6fdc4fcf86-pf28x_81a1e752-3477-4e08-b151-874b0e503a1b/kube-rbac-proxy/0.log" Nov 25 17:15:02 crc 
kubenswrapper[4879]: I1125 17:15:02.363362 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6fdc4fcf86-pf28x_81a1e752-3477-4e08-b151-874b0e503a1b/manager/2.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.502583 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-567f98c9d-zvb94_2037b3b9-3099-4f88-8e56-ec28ee25efa5/kube-rbac-proxy/0.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.513081 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6fdc4fcf86-pf28x_81a1e752-3477-4e08-b151-874b0e503a1b/manager/1.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.772006 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cb74df96-52qvx_9d5bb254-3519-4805-bbfa-c4fad026bec1/kube-rbac-proxy/0.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.773386 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-567f98c9d-zvb94_2037b3b9-3099-4f88-8e56-ec28ee25efa5/manager/1.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.803901 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-567f98c9d-zvb94_2037b3b9-3099-4f88-8e56-ec28ee25efa5/manager/2.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.977666 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cb74df96-52qvx_9d5bb254-3519-4805-bbfa-c4fad026bec1/manager/1.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.995289 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cb74df96-52qvx_9d5bb254-3519-4805-bbfa-c4fad026bec1/manager/0.log" Nov 25 17:15:02 crc kubenswrapper[4879]: I1125 17:15:02.995748 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-864885998-vnr67_06ee2ae7-d534-4170-9862-53a2580c39ce/kube-rbac-proxy/0.log" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.152797 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-864885998-vnr67_06ee2ae7-d534-4170-9862-53a2580c39ce/manager/2.log" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.268922 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-864885998-vnr67_06ee2ae7-d534-4170-9862-53a2580c39ce/manager/3.log" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.300303 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-77bf44fb75-xdhlf_b7d6b37e-0aff-4496-b240-7770d1d23827/manager/2.log" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.655978 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.716890 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume\") pod \"aad67702-0db0-4506-a450-532a2adeff53\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.717169 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume\") pod \"aad67702-0db0-4506-a450-532a2adeff53\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.717294 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vdp9\" (UniqueName: \"kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9\") pod \"aad67702-0db0-4506-a450-532a2adeff53\" (UID: \"aad67702-0db0-4506-a450-532a2adeff53\") " Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.718291 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume" (OuterVolumeSpecName: "config-volume") pod "aad67702-0db0-4506-a450-532a2adeff53" (UID: "aad67702-0db0-4506-a450-532a2adeff53"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.739571 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9" (OuterVolumeSpecName: "kube-api-access-6vdp9") pod "aad67702-0db0-4506-a450-532a2adeff53" (UID: "aad67702-0db0-4506-a450-532a2adeff53"). InnerVolumeSpecName "kube-api-access-6vdp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.743003 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aad67702-0db0-4506-a450-532a2adeff53" (UID: "aad67702-0db0-4506-a450-532a2adeff53"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.819924 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vdp9\" (UniqueName: \"kubernetes.io/projected/aad67702-0db0-4506-a450-532a2adeff53-kube-api-access-6vdp9\") on node \"crc\" DevicePath \"\"" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.819958 4879 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aad67702-0db0-4506-a450-532a2adeff53-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 17:15:03 crc kubenswrapper[4879]: I1125 17:15:03.819971 4879 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aad67702-0db0-4506-a450-532a2adeff53-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 17:15:04 crc kubenswrapper[4879]: I1125 17:15:04.307712 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" event={"ID":"aad67702-0db0-4506-a450-532a2adeff53","Type":"ContainerDied","Data":"0eb62b6558e2c2f280bd81f707f306446841eb50de9b21831ff0ea2eb5ad2e1b"} Nov 25 17:15:04 crc kubenswrapper[4879]: I1125 17:15:04.307758 4879 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eb62b6558e2c2f280bd81f707f306446841eb50de9b21831ff0ea2eb5ad2e1b" Nov 25 17:15:04 crc kubenswrapper[4879]: I1125 17:15:04.307826 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401515-r7kmp" Nov 25 17:15:04 crc kubenswrapper[4879]: I1125 17:15:04.347619 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr"] Nov 25 17:15:04 crc kubenswrapper[4879]: I1125 17:15:04.355963 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401470-nt4wr"] Nov 25 17:15:05 crc kubenswrapper[4879]: I1125 17:15:05.661871 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44056d5d-48ba-4c54-8469-716b4ded7a02" path="/var/lib/kubelet/pods/44056d5d-48ba-4c54-8469-716b4ded7a02/volumes" Nov 25 17:15:10 crc kubenswrapper[4879]: I1125 17:15:10.645080 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:15:10 crc kubenswrapper[4879]: E1125 17:15:10.645975 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:15:21 crc kubenswrapper[4879]: I1125 17:15:21.329907 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-pznxn_f28e079b-4592-4e5e-a59f-d2a3bab40e6a/control-plane-machine-set-operator/0.log" Nov 25 17:15:21 crc kubenswrapper[4879]: I1125 17:15:21.526770 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-s72fm_796c6e04-6bb0-4119-8433-4c050955799d/machine-api-operator/0.log" Nov 25 17:15:21 crc kubenswrapper[4879]: I1125 17:15:21.552287 4879 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-s72fm_796c6e04-6bb0-4119-8433-4c050955799d/kube-rbac-proxy/0.log" Nov 25 17:15:24 crc kubenswrapper[4879]: I1125 17:15:24.644828 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:15:24 crc kubenswrapper[4879]: E1125 17:15:24.645363 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:15:34 crc kubenswrapper[4879]: I1125 17:15:34.037311 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-z6dzl_2df9d6c8-3f76-4f2d-b7ff-c4874549babd/cert-manager-controller/1.log" Nov 25 17:15:34 crc kubenswrapper[4879]: I1125 17:15:34.069859 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-z6dzl_2df9d6c8-3f76-4f2d-b7ff-c4874549babd/cert-manager-controller/0.log" Nov 25 17:15:34 crc kubenswrapper[4879]: I1125 17:15:34.263023 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-bf2x5_3b39b6d7-9d19-4602-ad6e-c59a531818a8/cert-manager-cainjector/1.log" Nov 25 17:15:34 crc kubenswrapper[4879]: I1125 17:15:34.307790 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-bf2x5_3b39b6d7-9d19-4602-ad6e-c59a531818a8/cert-manager-cainjector/0.log" Nov 25 17:15:34 crc kubenswrapper[4879]: I1125 17:15:34.425222 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-gwv6b_99bc64d6-25b5-4e92-a4c7-4e744af93df6/cert-manager-webhook/0.log" Nov 25 17:15:39 crc kubenswrapper[4879]: I1125 17:15:39.658446 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:15:39 crc kubenswrapper[4879]: E1125 17:15:39.660733 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:15:48 crc kubenswrapper[4879]: I1125 17:15:48.570816 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-4wlrq_a7865254-ee4c-44ea-947e-b126835519db/nmstate-console-plugin/0.log" Nov 25 17:15:48 crc kubenswrapper[4879]: I1125 17:15:48.787645 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-9g2fw_19a3b86c-ed4d-436a-a58d-cd7027490ce0/kube-rbac-proxy/0.log" Nov 25 17:15:48 crc kubenswrapper[4879]: I1125 17:15:48.809796 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-9g2fw_19a3b86c-ed4d-436a-a58d-cd7027490ce0/nmstate-metrics/0.log" Nov 25 17:15:48 crc kubenswrapper[4879]: I1125 17:15:48.836394 4879 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-f4xpk_1b17dab8-ef46-4084-b1f2-8a5e35c2a73c/nmstate-handler/0.log" Nov 25 17:15:49 crc kubenswrapper[4879]: I1125 17:15:49.010076 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-cx9pc_547de8cf-5f37-47e7-80a8-c83f0f0ce36f/nmstate-operator/0.log" Nov 25 17:15:49 crc kubenswrapper[4879]: I1125 17:15:49.246982 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-mxnc7_13269f38-1cf7-42da-b5b9-7b61d2693aeb/nmstate-webhook/0.log" Nov 25 17:15:50 crc kubenswrapper[4879]: I1125 17:15:50.645273 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:15:50 crc kubenswrapper[4879]: E1125 17:15:50.645866 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:15:59 crc kubenswrapper[4879]: I1125 17:15:59.963995 4879 scope.go:117] "RemoveContainer" containerID="0ba4715e88851c7ae24365695d76ca82ace1dca9a17296e640ef2f37366bab34" Nov 25 17:16:04 crc kubenswrapper[4879]: I1125 17:16:04.733706 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-5rg2m_4531c2f9-6d64-4cdd-8546-a4494fcdf027/kube-rbac-proxy/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.056913 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-frr-files/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.171634 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-5rg2m_4531c2f9-6d64-4cdd-8546-a4494fcdf027/controller/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.644368 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:16:05 crc kubenswrapper[4879]: E1125 17:16:05.644795 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.749380 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-frr-files/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.785407 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-metrics/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.790380 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-reloader/0.log" Nov 25 17:16:05 crc kubenswrapper[4879]: I1125 17:16:05.812105 4879 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-reloader/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.031236 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-reloader/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.046449 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-frr-files/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.049246 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-metrics/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.101547 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-metrics/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.258263 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-reloader/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.268703 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-frr-files/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.288198 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/cp-metrics/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.335247 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/controller/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.454958 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/frr-metrics/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.486495 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/kube-rbac-proxy/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.610855 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/kube-rbac-proxy-frr/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.844340 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/reloader/0.log" Nov 25 17:16:06 crc kubenswrapper[4879]: I1125 17:16:06.862325 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-29xht_6b2f76a4-9bca-41a3-9be6-48dd06986803/frr-k8s-webhook-server/0.log" Nov 25 17:16:07 crc kubenswrapper[4879]: I1125 17:16:07.120931 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7965d46465-b9w8p_1bf9f1b8-1476-4f3a-963b-986a0ae66426/manager/3.log" Nov 25 17:16:07 crc kubenswrapper[4879]: I1125 17:16:07.191001 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7965d46465-b9w8p_1bf9f1b8-1476-4f3a-963b-986a0ae66426/manager/2.log" Nov 25 17:16:07 crc kubenswrapper[4879]: I1125 17:16:07.407589 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-b98dc7cc7-s6dps_d5400cd5-7909-4b72-92d6-f70a5f6cab2f/webhook-server/0.log" Nov 25 17:16:07 crc kubenswrapper[4879]: I1125 17:16:07.543204 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45g57_043cbac7-0f97-43ed-a287-4af89ceaf905/kube-rbac-proxy/0.log" Nov 25 17:16:08 crc kubenswrapper[4879]: I1125 17:16:08.870734 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-45g57_043cbac7-0f97-43ed-a287-4af89ceaf905/speaker/0.log" Nov 25 17:16:09 crc kubenswrapper[4879]: I1125 17:16:09.985434 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vx7w9_b11c1717-f8f4-4838-acc9-7dc492d69268/frr/0.log" Nov 25 17:16:16 crc kubenswrapper[4879]: I1125 17:16:16.645349 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:16:16 crc kubenswrapper[4879]: E1125 17:16:16.646184 4879 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-64t7t_openshift-machine-config-operator(1f8529f4-b6ae-4467-ad94-67b1113f9d6b)\"" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.147925 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/util/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.386962 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/pull/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.430016 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/pull/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.433266 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/util/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.628661 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/util/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.675621 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/extract/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.681495 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931an5svd_5f9ead12-79a9-40a3-a327-7b281e40ff56/pull/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.868696 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/util/0.log" Nov 25 
17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.985523 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/pull/0.log" Nov 25 17:16:21 crc kubenswrapper[4879]: I1125 17:16:21.988089 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/pull/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.002708 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/util/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.190516 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/util/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.206590 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/extract/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.210504 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ezljgc_6c4acdfd-e627-4666-b24c-51cf346e3757/pull/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.369828 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/util/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.558108 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/pull/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.590822 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/util/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.611315 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/pull/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.728456 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/pull/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.733392 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/util/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.815200 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92107rgs9_320cbc1f-7c52-4487-80f2-3397d3abdb86/extract/0.log" Nov 25 17:16:22 crc kubenswrapper[4879]: I1125 17:16:22.928072 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.073443 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.110080 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-content/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.115598 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-content/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.291571 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.297767 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/extract-content/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.458925 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.708714 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-content/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.714287 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.738179 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-content/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.908098 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-utilities/0.log" Nov 25 17:16:23 crc kubenswrapper[4879]: I1125 17:16:23.956654 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/extract-content/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: I1125 17:16:24.202626 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/util/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: I1125 17:16:24.429854 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/pull/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: I1125 17:16:24.485072 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/util/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: 
I1125 17:16:24.606544 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/pull/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: I1125 17:16:24.845047 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/util/0.log" Nov 25 17:16:24 crc kubenswrapper[4879]: I1125 17:16:24.879781 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/pull/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.095195 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c68btpr_adde5dde-ce0c-484c-b4e4-7326b167e712/extract/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.143492 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-s8nth_ac0dfdc7-d570-411d-87c6-c3f58bdf04ad/marketplace-operator/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.314671 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-utilities/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.566915 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-utilities/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.583729 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-content/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.605605 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k8tjf_6149d816-88ec-4de7-bdf4-ab7ddad33069/registry-server/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.661605 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-content/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.743317 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cnspb_edfd5cef-7064-4111-9482-fd3714d4ee32/registry-server/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.840274 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-utilities/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.870256 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/extract-content/0.log" Nov 25 17:16:25 crc kubenswrapper[4879]: I1125 17:16:25.959030 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-utilities/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.150593 4879 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-content/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.179909 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-content/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.189519 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-utilities/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.205667 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ndg5n_77e0321b-48ed-499e-ac29-e06a8cb770ec/registry-server/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.392528 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-content/0.log" Nov 25 17:16:26 crc kubenswrapper[4879]: I1125 17:16:26.427552 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/extract-utilities/0.log" Nov 25 17:16:27 crc kubenswrapper[4879]: I1125 17:16:27.229386 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-42t5z_4889c4d8-84da-4397-9b01-fa5f5695887b/registry-server/0.log" Nov 25 17:16:31 crc kubenswrapper[4879]: I1125 17:16:31.647597 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:16:32 crc kubenswrapper[4879]: I1125 17:16:32.286269 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"904e4a9c1c4b376b2ff8a3fcb95f8219e6f96026ce8b826b2bd1a22889bb5a6b"} Nov 25 17:16:37 crc kubenswrapper[4879]: I1125 17:16:37.959411 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-sgbxj_8228bbf9-6bfa-4f8c-a327-7e94cd658cfe/prometheus-operator/0.log" Nov 25 17:16:38 crc kubenswrapper[4879]: I1125 17:16:38.130261 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-74f5c5f6d4-qgz5v_ad6ade56-ff61-497e-944c-df0d2e9519e2/prometheus-operator-admission-webhook/0.log" Nov 25 17:16:38 crc kubenswrapper[4879]: I1125 17:16:38.172387 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-74f5c5f6d4-zrzhz_2af2c70b-ddca-4b26-997e-b8e1cb054796/prometheus-operator-admission-webhook/0.log" Nov 25 17:16:38 crc kubenswrapper[4879]: I1125 17:16:38.304960 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-8dgzm_98180543-60cf-4cb7-bfc2-6ae3ce782dc1/operator/0.log" Nov 25 17:16:38 crc kubenswrapper[4879]: I1125 17:16:38.359728 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-7v627_cc239cf3-61a6-4f20-9f66-a40e68fedd66/perses-operator/0.log" Nov 25 17:16:47 crc kubenswrapper[4879]: E1125 17:16:47.195912 4879 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.190:56410->38.102.83.190:33827: write tcp 
38.102.83.190:56410->38.102.83.190:33827: write: broken pipe Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.420679 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:09 crc kubenswrapper[4879]: E1125 17:17:09.421644 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad67702-0db0-4506-a450-532a2adeff53" containerName="collect-profiles" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.421660 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad67702-0db0-4506-a450-532a2adeff53" containerName="collect-profiles" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.421910 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="aad67702-0db0-4506-a450-532a2adeff53" containerName="collect-profiles" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.423879 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.436703 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.561182 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.561237 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.561373 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9jwl\" (UniqueName: \"kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.663178 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.663485 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.663648 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9jwl\" (UniqueName: \"kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl\") pod \"certified-operators-h665l\" (UID: 
\"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.663785 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.663928 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.684078 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9jwl\" (UniqueName: \"kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl\") pod \"certified-operators-h665l\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:09 crc kubenswrapper[4879]: I1125 17:17:09.759332 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:10 crc kubenswrapper[4879]: I1125 17:17:10.295916 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:10 crc kubenswrapper[4879]: I1125 17:17:10.716257 4879 generic.go:334] "Generic (PLEG): container finished" podID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerID="f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971" exitCode=0 Nov 25 17:17:10 crc kubenswrapper[4879]: I1125 17:17:10.716302 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerDied","Data":"f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971"} Nov 25 17:17:10 crc kubenswrapper[4879]: I1125 17:17:10.716540 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerStarted","Data":"02c3bdaf1c5c93c021f6081a776770cb52d44f875cb5fc792fbffc17694e5418"} Nov 25 17:17:10 crc kubenswrapper[4879]: I1125 17:17:10.718762 4879 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 17:17:11 crc kubenswrapper[4879]: I1125 17:17:11.733570 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerStarted","Data":"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86"} Nov 25 17:17:13 crc kubenswrapper[4879]: I1125 17:17:13.757053 4879 generic.go:334] "Generic (PLEG): container finished" podID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerID="8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86" exitCode=0 Nov 25 17:17:13 crc kubenswrapper[4879]: I1125 17:17:13.757160 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" 
event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerDied","Data":"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86"} Nov 25 17:17:14 crc kubenswrapper[4879]: I1125 17:17:14.774631 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerStarted","Data":"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f"} Nov 25 17:17:14 crc kubenswrapper[4879]: I1125 17:17:14.802316 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h665l" podStartSLOduration=2.347794323 podStartE2EDuration="5.8022964s" podCreationTimestamp="2025-11-25 17:17:09 +0000 UTC" firstStartedPulling="2025-11-25 17:17:10.718551213 +0000 UTC m=+10322.321964284" lastFinishedPulling="2025-11-25 17:17:14.17305329 +0000 UTC m=+10325.776466361" observedRunningTime="2025-11-25 17:17:14.793927036 +0000 UTC m=+10326.397340107" watchObservedRunningTime="2025-11-25 17:17:14.8022964 +0000 UTC m=+10326.405709471" Nov 25 17:17:19 crc kubenswrapper[4879]: I1125 17:17:19.760340 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:19 crc kubenswrapper[4879]: I1125 17:17:19.761159 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:19 crc kubenswrapper[4879]: I1125 17:17:19.811209 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:19 crc kubenswrapper[4879]: I1125 17:17:19.881277 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:20 crc kubenswrapper[4879]: I1125 17:17:20.053483 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:21 crc kubenswrapper[4879]: I1125 17:17:21.847415 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h665l" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="registry-server" containerID="cri-o://67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f" gracePeriod=2 Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.351663 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.503044 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content\") pod \"f0bd0b9c-053e-4322-91b5-6390441f3336\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.503210 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9jwl\" (UniqueName: \"kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl\") pod \"f0bd0b9c-053e-4322-91b5-6390441f3336\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.503704 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities\") pod \"f0bd0b9c-053e-4322-91b5-6390441f3336\" (UID: \"f0bd0b9c-053e-4322-91b5-6390441f3336\") " Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.505169 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities" (OuterVolumeSpecName: "utilities") pod "f0bd0b9c-053e-4322-91b5-6390441f3336" (UID: "f0bd0b9c-053e-4322-91b5-6390441f3336"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.508744 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl" (OuterVolumeSpecName: "kube-api-access-q9jwl") pod "f0bd0b9c-053e-4322-91b5-6390441f3336" (UID: "f0bd0b9c-053e-4322-91b5-6390441f3336"). InnerVolumeSpecName "kube-api-access-q9jwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.551879 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0bd0b9c-053e-4322-91b5-6390441f3336" (UID: "f0bd0b9c-053e-4322-91b5-6390441f3336"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.606280 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.606314 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9jwl\" (UniqueName: \"kubernetes.io/projected/f0bd0b9c-053e-4322-91b5-6390441f3336-kube-api-access-q9jwl\") on node \"crc\" DevicePath \"\"" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.606329 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0bd0b9c-053e-4322-91b5-6390441f3336-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.873837 4879 generic.go:334] "Generic (PLEG): container finished" podID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerID="67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f" exitCode=0 Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.873887 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerDied","Data":"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f"} Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.873917 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h665l" event={"ID":"f0bd0b9c-053e-4322-91b5-6390441f3336","Type":"ContainerDied","Data":"02c3bdaf1c5c93c021f6081a776770cb52d44f875cb5fc792fbffc17694e5418"} Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.873937 4879 scope.go:117] "RemoveContainer" containerID="67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.874064 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h665l" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.904679 4879 scope.go:117] "RemoveContainer" containerID="8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.923466 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.927469 4879 scope.go:117] "RemoveContainer" containerID="f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.934449 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h665l"] Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.974766 4879 scope.go:117] "RemoveContainer" containerID="67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f" Nov 25 17:17:22 crc kubenswrapper[4879]: E1125 17:17:22.975529 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f\": container with ID starting with 67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f not found: ID does not exist" containerID="67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.975596 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f"} err="failed to get container status \"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f\": rpc error: code = NotFound desc = could not find container \"67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f\": container with ID starting with 67b400c44f34d891c7e001d7d5f1d17a88db1126d765e6f60d19332c4291b31f not found: ID does not exist" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.975630 4879 scope.go:117] "RemoveContainer" containerID="8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86" Nov 25 17:17:22 crc kubenswrapper[4879]: E1125 17:17:22.975941 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86\": container with ID starting with 8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86 not found: ID does not exist" containerID="8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.975971 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86"} err="failed to get container status \"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86\": rpc error: code = NotFound desc = could not find container \"8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86\": container with ID starting with 8f2617e97238d942907c9d9bbc354e99c77c521de32bdaf7d721da15d4aa5a86 not found: ID does not exist" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.975996 4879 scope.go:117] "RemoveContainer" containerID="f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971" Nov 25 17:17:22 crc kubenswrapper[4879]: E1125 17:17:22.976252 4879 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971\": container with ID starting with f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971 not found: ID does not exist" containerID="f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971" Nov 25 17:17:22 crc kubenswrapper[4879]: I1125 17:17:22.976292 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971"} err="failed to get container status \"f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971\": rpc error: code = NotFound desc = could not find container \"f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971\": container with ID starting with f87b6cdf1bc5a3a473dc4f28095710d196d706f1482e0f7399c24dcbc6678971 not found: ID does not exist" Nov 25 17:17:23 crc kubenswrapper[4879]: I1125 17:17:23.660384 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" path="/var/lib/kubelet/pods/f0bd0b9c-053e-4322-91b5-6390441f3336/volumes" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.049339 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:15 crc kubenswrapper[4879]: E1125 17:18:15.052264 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="registry-server" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.052434 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="registry-server" Nov 25 17:18:15 crc kubenswrapper[4879]: E1125 17:18:15.052562 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="extract-utilities" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.052677 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="extract-utilities" Nov 25 17:18:15 crc kubenswrapper[4879]: E1125 17:18:15.052823 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="extract-content" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.052909 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="extract-content" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.053395 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0bd0b9c-053e-4322-91b5-6390441f3336" containerName="registry-server" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.056550 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.073292 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.170540 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j42sk\" (UniqueName: \"kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.170651 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.170776 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.272627 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.273106 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j42sk\" (UniqueName: \"kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.273143 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.273285 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.273681 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.293241 4879 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j42sk\" (UniqueName: \"kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk\") pod \"community-operators-46rrx\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.393940 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:15 crc kubenswrapper[4879]: I1125 17:18:15.993027 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:16 crc kubenswrapper[4879]: I1125 17:18:16.447709 4879 generic.go:334] "Generic (PLEG): container finished" podID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerID="c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787" exitCode=0 Nov 25 17:18:16 crc kubenswrapper[4879]: I1125 17:18:16.447874 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerDied","Data":"c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787"} Nov 25 17:18:16 crc kubenswrapper[4879]: I1125 17:18:16.448051 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerStarted","Data":"89334c654203d2eb69d858fad06b1e69ee5bd19061c01281f61a74486a440ff4"} Nov 25 17:18:18 crc kubenswrapper[4879]: I1125 17:18:18.472217 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerStarted","Data":"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551"} Nov 25 17:18:22 crc kubenswrapper[4879]: I1125 17:18:22.555694 4879 generic.go:334] "Generic (PLEG): container finished" podID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerID="b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551" exitCode=0 Nov 25 17:18:22 crc kubenswrapper[4879]: I1125 17:18:22.555762 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerDied","Data":"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551"} Nov 25 17:18:23 crc kubenswrapper[4879]: I1125 17:18:23.569003 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerStarted","Data":"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa"} Nov 25 17:18:23 crc kubenswrapper[4879]: I1125 17:18:23.599494 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-46rrx" podStartSLOduration=2.039802219 podStartE2EDuration="8.599466119s" podCreationTimestamp="2025-11-25 17:18:15 +0000 UTC" firstStartedPulling="2025-11-25 17:18:16.449594211 +0000 UTC m=+10388.053007282" lastFinishedPulling="2025-11-25 17:18:23.009258111 +0000 UTC m=+10394.612671182" observedRunningTime="2025-11-25 17:18:23.593946929 +0000 UTC m=+10395.197360020" watchObservedRunningTime="2025-11-25 17:18:23.599466119 +0000 UTC m=+10395.202879200" Nov 25 17:18:25 crc kubenswrapper[4879]: I1125 17:18:25.394872 4879 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:25 crc kubenswrapper[4879]: I1125 17:18:25.395496 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:25 crc kubenswrapper[4879]: I1125 17:18:25.440504 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:35 crc kubenswrapper[4879]: I1125 17:18:35.449100 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:35 crc kubenswrapper[4879]: I1125 17:18:35.511131 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:35 crc kubenswrapper[4879]: I1125 17:18:35.706677 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-46rrx" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="registry-server" containerID="cri-o://ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa" gracePeriod=2 Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.269080 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.402272 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content\") pod \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.402587 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities\") pod \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.402786 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j42sk\" (UniqueName: \"kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk\") pod \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\" (UID: \"2195f2d4-fa81-41f3-a45b-782d4dc56b22\") " Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.403167 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities" (OuterVolumeSpecName: "utilities") pod "2195f2d4-fa81-41f3-a45b-782d4dc56b22" (UID: "2195f2d4-fa81-41f3-a45b-782d4dc56b22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.403486 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.408240 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk" (OuterVolumeSpecName: "kube-api-access-j42sk") pod "2195f2d4-fa81-41f3-a45b-782d4dc56b22" (UID: "2195f2d4-fa81-41f3-a45b-782d4dc56b22"). 
InnerVolumeSpecName "kube-api-access-j42sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.447856 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2195f2d4-fa81-41f3-a45b-782d4dc56b22" (UID: "2195f2d4-fa81-41f3-a45b-782d4dc56b22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.505824 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j42sk\" (UniqueName: \"kubernetes.io/projected/2195f2d4-fa81-41f3-a45b-782d4dc56b22-kube-api-access-j42sk\") on node \"crc\" DevicePath \"\"" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.505857 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2195f2d4-fa81-41f3-a45b-782d4dc56b22-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.719381 4879 generic.go:334] "Generic (PLEG): container finished" podID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerID="ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa" exitCode=0 Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.719488 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46rrx" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.719486 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerDied","Data":"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa"} Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.719822 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46rrx" event={"ID":"2195f2d4-fa81-41f3-a45b-782d4dc56b22","Type":"ContainerDied","Data":"89334c654203d2eb69d858fad06b1e69ee5bd19061c01281f61a74486a440ff4"} Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.719843 4879 scope.go:117] "RemoveContainer" containerID="ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.757339 4879 scope.go:117] "RemoveContainer" containerID="b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.775043 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.784283 4879 scope.go:117] "RemoveContainer" containerID="c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.789699 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-46rrx"] Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.831335 4879 scope.go:117] "RemoveContainer" containerID="ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa" Nov 25 17:18:36 crc kubenswrapper[4879]: E1125 17:18:36.831774 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa\": container with ID starting with 
ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa not found: ID does not exist" containerID="ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.831808 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa"} err="failed to get container status \"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa\": rpc error: code = NotFound desc = could not find container \"ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa\": container with ID starting with ef0f89e3ec601a1f106ead9eb0e553d15faefe7090c32380431230216858f2fa not found: ID does not exist" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.831829 4879 scope.go:117] "RemoveContainer" containerID="b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551" Nov 25 17:18:36 crc kubenswrapper[4879]: E1125 17:18:36.833749 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551\": container with ID starting with b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551 not found: ID does not exist" containerID="b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.833798 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551"} err="failed to get container status \"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551\": rpc error: code = NotFound desc = could not find container \"b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551\": container with ID starting with b5d8434382c29a6dd6e8e7992e5cca58df5f696b94468856be90083fd4a7b551 not found: ID does not exist" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.833865 4879 scope.go:117] "RemoveContainer" containerID="c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787" Nov 25 17:18:36 crc kubenswrapper[4879]: E1125 17:18:36.834280 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787\": container with ID starting with c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787 not found: ID does not exist" containerID="c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787" Nov 25 17:18:36 crc kubenswrapper[4879]: I1125 17:18:36.834331 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787"} err="failed to get container status \"c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787\": rpc error: code = NotFound desc = could not find container \"c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787\": container with ID starting with c456abe8a8d66c4758e6e2372f2168de72d7d3114b9aa0f66a77c18bf7018787 not found: ID does not exist" Nov 25 17:18:37 crc kubenswrapper[4879]: I1125 17:18:37.673496 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" path="/var/lib/kubelet/pods/2195f2d4-fa81-41f3-a45b-782d4dc56b22/volumes" Nov 25 17:18:46 crc kubenswrapper[4879]: I1125 17:18:46.847985 
4879 generic.go:334] "Generic (PLEG): container finished" podID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerID="655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369" exitCode=0 Nov 25 17:18:46 crc kubenswrapper[4879]: I1125 17:18:46.848102 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wbjbv/must-gather-tw82t" event={"ID":"9b12d2af-292c-4c47-8b96-7b9ef3bda01a","Type":"ContainerDied","Data":"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369"} Nov 25 17:18:46 crc kubenswrapper[4879]: I1125 17:18:46.849560 4879 scope.go:117] "RemoveContainer" containerID="655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369" Nov 25 17:18:47 crc kubenswrapper[4879]: I1125 17:18:47.409751 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:18:47 crc kubenswrapper[4879]: I1125 17:18:47.409860 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:18:47 crc kubenswrapper[4879]: I1125 17:18:47.766421 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbjbv_must-gather-tw82t_9b12d2af-292c-4c47-8b96-7b9ef3bda01a/gather/0.log" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.306075 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wbjbv/must-gather-tw82t"] Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.306996 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wbjbv/must-gather-tw82t" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="copy" containerID="cri-o://eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0" gracePeriod=2 Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.321952 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wbjbv/must-gather-tw82t"] Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.761637 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbjbv_must-gather-tw82t_9b12d2af-292c-4c47-8b96-7b9ef3bda01a/copy/0.log" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.762380 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.899624 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dthsh\" (UniqueName: \"kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh\") pod \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.899879 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output\") pod \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\" (UID: \"9b12d2af-292c-4c47-8b96-7b9ef3bda01a\") " Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.907348 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh" (OuterVolumeSpecName: "kube-api-access-dthsh") pod "9b12d2af-292c-4c47-8b96-7b9ef3bda01a" (UID: "9b12d2af-292c-4c47-8b96-7b9ef3bda01a"). InnerVolumeSpecName "kube-api-access-dthsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.955403 4879 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wbjbv_must-gather-tw82t_9b12d2af-292c-4c47-8b96-7b9ef3bda01a/copy/0.log" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.956678 4879 generic.go:334] "Generic (PLEG): container finished" podID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerID="eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0" exitCode=143 Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.956731 4879 scope.go:117] "RemoveContainer" containerID="eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.956865 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wbjbv/must-gather-tw82t" Nov 25 17:18:56 crc kubenswrapper[4879]: I1125 17:18:56.991436 4879 scope.go:117] "RemoveContainer" containerID="655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.002610 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dthsh\" (UniqueName: \"kubernetes.io/projected/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-kube-api-access-dthsh\") on node \"crc\" DevicePath \"\"" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.036240 4879 scope.go:117] "RemoveContainer" containerID="eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0" Nov 25 17:18:57 crc kubenswrapper[4879]: E1125 17:18:57.038483 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0\": container with ID starting with eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0 not found: ID does not exist" containerID="eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.038573 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0"} err="failed to get container status \"eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0\": rpc error: code = NotFound desc = could not find container \"eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0\": container with ID starting with eaf0255026d1dd50f0dcacc87eacf1f5dd5653f2aaeabdc9af28bf36144837d0 not found: ID does not exist" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.038606 4879 scope.go:117] "RemoveContainer" containerID="655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369" Nov 25 17:18:57 crc kubenswrapper[4879]: E1125 17:18:57.038919 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369\": container with ID starting with 655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369 not found: ID does not exist" containerID="655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.038948 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369"} err="failed to get container status \"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369\": rpc error: code = NotFound desc = could not find container \"655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369\": container with ID starting with 655b135ae7f2ca83f669d7e555c206a190a6f5f1121e04200492e7669ff0a369 not found: ID does not exist" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.089232 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "9b12d2af-292c-4c47-8b96-7b9ef3bda01a" (UID: "9b12d2af-292c-4c47-8b96-7b9ef3bda01a"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.104020 4879 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9b12d2af-292c-4c47-8b96-7b9ef3bda01a-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 25 17:18:57 crc kubenswrapper[4879]: I1125 17:18:57.657921 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" path="/var/lib/kubelet/pods/9b12d2af-292c-4c47-8b96-7b9ef3bda01a/volumes" Nov 25 17:19:17 crc kubenswrapper[4879]: I1125 17:19:17.411141 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:19:17 crc kubenswrapper[4879]: I1125 17:19:17.411728 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:19:47 crc kubenswrapper[4879]: I1125 17:19:47.408934 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:19:47 crc kubenswrapper[4879]: I1125 17:19:47.409478 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 17:19:47 crc kubenswrapper[4879]: I1125 17:19:47.409527 4879 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" Nov 25 17:19:47 crc kubenswrapper[4879]: I1125 17:19:47.410017 4879 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"904e4a9c1c4b376b2ff8a3fcb95f8219e6f96026ce8b826b2bd1a22889bb5a6b"} pod="openshift-machine-config-operator/machine-config-daemon-64t7t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 17:19:47 crc kubenswrapper[4879]: I1125 17:19:47.410077 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" containerID="cri-o://904e4a9c1c4b376b2ff8a3fcb95f8219e6f96026ce8b826b2bd1a22889bb5a6b" gracePeriod=600 Nov 25 17:19:48 crc kubenswrapper[4879]: I1125 17:19:48.561557 4879 generic.go:334] "Generic (PLEG): container finished" podID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerID="904e4a9c1c4b376b2ff8a3fcb95f8219e6f96026ce8b826b2bd1a22889bb5a6b" exitCode=0 Nov 25 17:19:48 crc kubenswrapper[4879]: I1125 17:19:48.561692 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" 
event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerDied","Data":"904e4a9c1c4b376b2ff8a3fcb95f8219e6f96026ce8b826b2bd1a22889bb5a6b"} Nov 25 17:19:48 crc kubenswrapper[4879]: I1125 17:19:48.561910 4879 scope.go:117] "RemoveContainer" containerID="7021c9948f8d56fdf09bca8111e5b1d28088645976af6955931e42c1bad29206" Nov 25 17:19:49 crc kubenswrapper[4879]: I1125 17:19:49.584807 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" event={"ID":"1f8529f4-b6ae-4467-ad94-67b1113f9d6b","Type":"ContainerStarted","Data":"2e2c2221944084bbf986a335da23f47635932047a96731b5ba6d99e8aa6c0fb4"} Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.889281 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:28 crc kubenswrapper[4879]: E1125 17:21:28.890224 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="gather" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890237 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="gather" Nov 25 17:21:28 crc kubenswrapper[4879]: E1125 17:21:28.890259 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="extract-content" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890265 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="extract-content" Nov 25 17:21:28 crc kubenswrapper[4879]: E1125 17:21:28.890282 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="extract-utilities" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890289 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="extract-utilities" Nov 25 17:21:28 crc kubenswrapper[4879]: E1125 17:21:28.890302 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="copy" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890308 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="copy" Nov 25 17:21:28 crc kubenswrapper[4879]: E1125 17:21:28.890326 4879 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="registry-server" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890331 4879 state_mem.go:107] "Deleted CPUSet assignment" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="registry-server" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890540 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="gather" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890558 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="2195f2d4-fa81-41f3-a45b-782d4dc56b22" containerName="registry-server" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.890569 4879 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b12d2af-292c-4c47-8b96-7b9ef3bda01a" containerName="copy" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.892194 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:28 crc kubenswrapper[4879]: I1125 17:21:28.922219 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.089574 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.089995 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.090081 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l4l7\" (UniqueName: \"kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.090444 4879 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.092519 4879 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.112693 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192130 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192185 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l4l7\" (UniqueName: \"kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192288 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192361 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsnp8\" (UniqueName: \"kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8\") pod \"redhat-operators-dj9kh\" (UID: 
\"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192401 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192427 4879 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192569 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.192709 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.211636 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l4l7\" (UniqueName: \"kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7\") pod \"redhat-marketplace-822nv\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.217854 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.294712 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsnp8\" (UniqueName: \"kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.295087 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.295330 4879 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.295624 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.295742 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.311463 4879 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsnp8\" (UniqueName: \"kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8\") pod \"redhat-operators-dj9kh\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.428631 4879 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:29 crc kubenswrapper[4879]: I1125 17:21:29.769502 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.008367 4879 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:30 crc kubenswrapper[4879]: W1125 17:21:30.040001 4879 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod580a75ff_d466_4cd0_97e0_f0f9194f9fa1.slice/crio-0d948771308db1b3e0650891f3e87303b983d595b78bbf315bda457fcd39d1a7 WatchSource:0}: Error finding container 0d948771308db1b3e0650891f3e87303b983d595b78bbf315bda457fcd39d1a7: Status 404 returned error can't find the container with id 0d948771308db1b3e0650891f3e87303b983d595b78bbf315bda457fcd39d1a7 Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.693172 4879 generic.go:334] "Generic (PLEG): container finished" podID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" containerID="4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815" exitCode=0 Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.693294 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerDied","Data":"4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815"} Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.693581 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerStarted","Data":"0d948771308db1b3e0650891f3e87303b983d595b78bbf315bda457fcd39d1a7"} Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.695300 4879 generic.go:334] "Generic (PLEG): container finished" podID="56349510-dc4d-4578-964b-d9d627b29a00" containerID="fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c" exitCode=0 Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.695335 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerDied","Data":"fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c"} Nov 25 17:21:30 crc kubenswrapper[4879]: I1125 17:21:30.695356 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerStarted","Data":"b9549875746dc0c63e3f2d7249d07f81594f29f749674f8f633d77b81733d256"} Nov 25 17:21:31 crc kubenswrapper[4879]: I1125 17:21:31.708073 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerStarted","Data":"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213"} Nov 25 17:21:32 crc kubenswrapper[4879]: I1125 17:21:32.722923 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerStarted","Data":"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77"} Nov 25 17:21:32 crc kubenswrapper[4879]: I1125 17:21:32.727467 4879 generic.go:334] "Generic (PLEG): container finished" 
podID="56349510-dc4d-4578-964b-d9d627b29a00" containerID="80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213" exitCode=0 Nov 25 17:21:32 crc kubenswrapper[4879]: I1125 17:21:32.727591 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerDied","Data":"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213"} Nov 25 17:21:34 crc kubenswrapper[4879]: I1125 17:21:34.755713 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerStarted","Data":"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076"} Nov 25 17:21:34 crc kubenswrapper[4879]: I1125 17:21:34.781077 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-822nv" podStartSLOduration=3.8132693890000002 podStartE2EDuration="6.781051661s" podCreationTimestamp="2025-11-25 17:21:28 +0000 UTC" firstStartedPulling="2025-11-25 17:21:30.69738221 +0000 UTC m=+10582.300795281" lastFinishedPulling="2025-11-25 17:21:33.665164482 +0000 UTC m=+10585.268577553" observedRunningTime="2025-11-25 17:21:34.773296141 +0000 UTC m=+10586.376709222" watchObservedRunningTime="2025-11-25 17:21:34.781051661 +0000 UTC m=+10586.384464742" Nov 25 17:21:37 crc kubenswrapper[4879]: I1125 17:21:37.788053 4879 generic.go:334] "Generic (PLEG): container finished" podID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" containerID="85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77" exitCode=0 Nov 25 17:21:37 crc kubenswrapper[4879]: I1125 17:21:37.788112 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerDied","Data":"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77"} Nov 25 17:21:38 crc kubenswrapper[4879]: I1125 17:21:38.801707 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerStarted","Data":"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7"} Nov 25 17:21:38 crc kubenswrapper[4879]: I1125 17:21:38.834534 4879 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dj9kh" podStartSLOduration=2.350343617 podStartE2EDuration="9.834508115s" podCreationTimestamp="2025-11-25 17:21:29 +0000 UTC" firstStartedPulling="2025-11-25 17:21:30.697446261 +0000 UTC m=+10582.300859332" lastFinishedPulling="2025-11-25 17:21:38.181610739 +0000 UTC m=+10589.785023830" observedRunningTime="2025-11-25 17:21:38.826524938 +0000 UTC m=+10590.429938019" watchObservedRunningTime="2025-11-25 17:21:38.834508115 +0000 UTC m=+10590.437921196" Nov 25 17:21:39 crc kubenswrapper[4879]: I1125 17:21:39.218329 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:39 crc kubenswrapper[4879]: I1125 17:21:39.218410 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:39 crc kubenswrapper[4879]: I1125 17:21:39.294234 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:39 crc 
kubenswrapper[4879]: I1125 17:21:39.429698 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:39 crc kubenswrapper[4879]: I1125 17:21:39.429772 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:39 crc kubenswrapper[4879]: I1125 17:21:39.876239 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:40 crc kubenswrapper[4879]: I1125 17:21:40.498597 4879 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dj9kh" podUID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" containerName="registry-server" probeResult="failure" output=< Nov 25 17:21:40 crc kubenswrapper[4879]: timeout: failed to connect service ":50051" within 1s Nov 25 17:21:40 crc kubenswrapper[4879]: > Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.082308 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.082785 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-822nv" podUID="56349510-dc4d-4578-964b-d9d627b29a00" containerName="registry-server" containerID="cri-o://db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076" gracePeriod=2 Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.616707 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.759752 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content\") pod \"56349510-dc4d-4578-964b-d9d627b29a00\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.759928 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l4l7\" (UniqueName: \"kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7\") pod \"56349510-dc4d-4578-964b-d9d627b29a00\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.760561 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities\") pod \"56349510-dc4d-4578-964b-d9d627b29a00\" (UID: \"56349510-dc4d-4578-964b-d9d627b29a00\") " Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.761623 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities" (OuterVolumeSpecName: "utilities") pod "56349510-dc4d-4578-964b-d9d627b29a00" (UID: "56349510-dc4d-4578-964b-d9d627b29a00"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.771140 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7" (OuterVolumeSpecName: "kube-api-access-4l4l7") pod "56349510-dc4d-4578-964b-d9d627b29a00" (UID: "56349510-dc4d-4578-964b-d9d627b29a00"). InnerVolumeSpecName "kube-api-access-4l4l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.776562 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56349510-dc4d-4578-964b-d9d627b29a00" (UID: "56349510-dc4d-4578-964b-d9d627b29a00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.860114 4879 generic.go:334] "Generic (PLEG): container finished" podID="56349510-dc4d-4578-964b-d9d627b29a00" containerID="db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076" exitCode=0 Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.860215 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerDied","Data":"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076"} Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.860253 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-822nv" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.860285 4879 scope.go:117] "RemoveContainer" containerID="db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.860263 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-822nv" event={"ID":"56349510-dc4d-4578-964b-d9d627b29a00","Type":"ContainerDied","Data":"b9549875746dc0c63e3f2d7249d07f81594f29f749674f8f633d77b81733d256"} Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.863664 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.863705 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56349510-dc4d-4578-964b-d9d627b29a00-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.863725 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l4l7\" (UniqueName: \"kubernetes.io/projected/56349510-dc4d-4578-964b-d9d627b29a00-kube-api-access-4l4l7\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.891986 4879 scope.go:117] "RemoveContainer" containerID="80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.909793 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.923374 4879 scope.go:117] "RemoveContainer" 
containerID="fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.927297 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-822nv"] Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.970444 4879 scope.go:117] "RemoveContainer" containerID="db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076" Nov 25 17:21:42 crc kubenswrapper[4879]: E1125 17:21:42.970890 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076\": container with ID starting with db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076 not found: ID does not exist" containerID="db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.970922 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076"} err="failed to get container status \"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076\": rpc error: code = NotFound desc = could not find container \"db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076\": container with ID starting with db702907920b90469577823810124b05593470fe53c884afd264cd7b4ae52076 not found: ID does not exist" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.970944 4879 scope.go:117] "RemoveContainer" containerID="80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213" Nov 25 17:21:42 crc kubenswrapper[4879]: E1125 17:21:42.971335 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213\": container with ID starting with 80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213 not found: ID does not exist" containerID="80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.971367 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213"} err="failed to get container status \"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213\": rpc error: code = NotFound desc = could not find container \"80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213\": container with ID starting with 80d1640b34ae786a6be4969652a9c9022cc0fa2771c3286c715f2a7c42363213 not found: ID does not exist" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.971383 4879 scope.go:117] "RemoveContainer" containerID="fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c" Nov 25 17:21:42 crc kubenswrapper[4879]: E1125 17:21:42.971722 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c\": container with ID starting with fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c not found: ID does not exist" containerID="fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c" Nov 25 17:21:42 crc kubenswrapper[4879]: I1125 17:21:42.971742 4879 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c"} err="failed to get container status \"fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c\": rpc error: code = NotFound desc = could not find container \"fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c\": container with ID starting with fcc2660e54be2fe7ceee0057b4517a7993d9a6fed92f6ad6fab769e8f054d86c not found: ID does not exist" Nov 25 17:21:43 crc kubenswrapper[4879]: I1125 17:21:43.670467 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56349510-dc4d-4578-964b-d9d627b29a00" path="/var/lib/kubelet/pods/56349510-dc4d-4578-964b-d9d627b29a00/volumes" Nov 25 17:21:49 crc kubenswrapper[4879]: I1125 17:21:49.484838 4879 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:49 crc kubenswrapper[4879]: I1125 17:21:49.544586 4879 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:49 crc kubenswrapper[4879]: I1125 17:21:49.733885 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:50 crc kubenswrapper[4879]: I1125 17:21:50.974050 4879 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dj9kh" podUID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" containerName="registry-server" containerID="cri-o://39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7" gracePeriod=2 Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.528651 4879 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.610783 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content\") pod \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.611449 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsnp8\" (UniqueName: \"kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8\") pod \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.611738 4879 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities\") pod \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\" (UID: \"580a75ff-d466-4cd0-97e0-f0f9194f9fa1\") " Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.612620 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities" (OuterVolumeSpecName: "utilities") pod "580a75ff-d466-4cd0-97e0-f0f9194f9fa1" (UID: "580a75ff-d466-4cd0-97e0-f0f9194f9fa1"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.618373 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8" (OuterVolumeSpecName: "kube-api-access-xsnp8") pod "580a75ff-d466-4cd0-97e0-f0f9194f9fa1" (UID: "580a75ff-d466-4cd0-97e0-f0f9194f9fa1"). InnerVolumeSpecName "kube-api-access-xsnp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.714403 4879 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.714557 4879 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsnp8\" (UniqueName: \"kubernetes.io/projected/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-kube-api-access-xsnp8\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.736947 4879 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "580a75ff-d466-4cd0-97e0-f0f9194f9fa1" (UID: "580a75ff-d466-4cd0-97e0-f0f9194f9fa1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.816014 4879 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/580a75ff-d466-4cd0-97e0-f0f9194f9fa1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.996774 4879 generic.go:334] "Generic (PLEG): container finished" podID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" containerID="39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7" exitCode=0 Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.996831 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerDied","Data":"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7"} Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.996882 4879 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dj9kh" event={"ID":"580a75ff-d466-4cd0-97e0-f0f9194f9fa1","Type":"ContainerDied","Data":"0d948771308db1b3e0650891f3e87303b983d595b78bbf315bda457fcd39d1a7"} Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.996907 4879 scope.go:117] "RemoveContainer" containerID="39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7" Nov 25 17:21:51 crc kubenswrapper[4879]: I1125 17:21:51.998307 4879 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dj9kh" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.025664 4879 scope.go:117] "RemoveContainer" containerID="85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.060208 4879 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.068233 4879 scope.go:117] "RemoveContainer" containerID="4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.072512 4879 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dj9kh"] Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.133010 4879 scope.go:117] "RemoveContainer" containerID="39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7" Nov 25 17:21:52 crc kubenswrapper[4879]: E1125 17:21:52.134108 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7\": container with ID starting with 39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7 not found: ID does not exist" containerID="39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.134193 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7"} err="failed to get container status \"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7\": rpc error: code = NotFound desc = could not find container \"39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7\": container with ID starting with 39ff6653d623dc601c8c9ff8e8897354bd956f1513dd2da7a49c0543fe6149c7 not found: ID does not exist" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.134229 4879 scope.go:117] "RemoveContainer" containerID="85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77" Nov 25 17:21:52 crc kubenswrapper[4879]: E1125 17:21:52.134712 4879 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77\": container with ID starting with 85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77 not found: ID does not exist" containerID="85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.134761 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77"} err="failed to get container status \"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77\": rpc error: code = NotFound desc = could not find container \"85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77\": container with ID starting with 85ef48d6ba5a3a1eaea7efac3a5ae5e41bd885c598f6c15e3b55bfa7654f0c77 not found: ID does not exist" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.134802 4879 scope.go:117] "RemoveContainer" containerID="4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815" Nov 25 17:21:52 crc kubenswrapper[4879]: E1125 17:21:52.135400 4879 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815\": container with ID starting with 4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815 not found: ID does not exist" containerID="4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815" Nov 25 17:21:52 crc kubenswrapper[4879]: I1125 17:21:52.135455 4879 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815"} err="failed to get container status \"4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815\": rpc error: code = NotFound desc = could not find container \"4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815\": container with ID starting with 4cfa4de0248b8ccd674a03f969a9c45ec59c2b97a54d9b076a0544f1cfc3d815 not found: ID does not exist" Nov 25 17:21:53 crc kubenswrapper[4879]: I1125 17:21:53.659924 4879 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="580a75ff-d466-4cd0-97e0-f0f9194f9fa1" path="/var/lib/kubelet/pods/580a75ff-d466-4cd0-97e0-f0f9194f9fa1/volumes" Nov 25 17:22:17 crc kubenswrapper[4879]: I1125 17:22:17.408916 4879 patch_prober.go:28] interesting pod/machine-config-daemon-64t7t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 17:22:17 crc kubenswrapper[4879]: I1125 17:22:17.409614 4879 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-64t7t" podUID="1f8529f4-b6ae-4467-ad94-67b1113f9d6b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515111362724024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015111362724017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015111335545016510 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015111335545015460 5ustar corecore